// Copyright 2015-2021 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//

// This header is generated from the Khronos Vulkan XML API Registry.

#ifndef VULKAN_HPP
#define VULKAN_HPP

#if defined( _MSVC_LANG )
#  define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
#else
#  define VULKAN_HPP_CPLUSPLUS __cplusplus
#endif

#if 201703L < VULKAN_HPP_CPLUSPLUS
#  define VULKAN_HPP_CPP_VERSION 20
#elif 201402L < VULKAN_HPP_CPLUSPLUS
#  define VULKAN_HPP_CPP_VERSION 17
#elif 201103L < VULKAN_HPP_CPLUSPLUS
#  define VULKAN_HPP_CPP_VERSION 14
#elif 199711L < VULKAN_HPP_CPLUSPLUS
#  define VULKAN_HPP_CPP_VERSION 11
#else
#  error "vulkan.hpp needs at least c++ standard version 11"
#endif

#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <functional>
#include <initializer_list>
#include <sstream>
#include <string>
#include <system_error>
#include <tuple>
#include <type_traits>
#include <vulkan/vulkan.h>

#if 17 <= VULKAN_HPP_CPP_VERSION
#  include <string_view>
#endif

#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
#  if !defined( VULKAN_HPP_NO_SMART_HANDLE )
#    define VULKAN_HPP_NO_SMART_HANDLE
#  endif
#else
#  include <memory>
#  include <vector>
#endif

#if !defined( VULKAN_HPP_ASSERT )
#  include <cassert>
#  define VULKAN_HPP_ASSERT assert
#endif

#if !defined( VULKAN_HPP_ASSERT_ON_RESULT )
#  define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
#endif

#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL )
#  define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
#endif

#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
#    include <dlfcn.h>
#  elif defined( _WIN32 )
typedef struct HINSTANCE__ * HINSTANCE;
#    if defined( _WIN64 )
typedef int64_t( __stdcall * FARPROC )();
#    else
typedef int( __stdcall * FARPROC )();
#    endif
extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName );
extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule );
extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName );
#  endif
#endif
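
// Illustrative sketch (not generated code): the declarations above back the dynamic loader
// utility declared further down this header. If you load the Vulkan entry points yourself,
// you can opt out of any dlopen / LoadLibraryA usage before including this header:
//
//   #define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 0
//   #include <vulkan/vulkan.hpp>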

#if !defined( __has_include )
#  define __has_include( x ) false
#endif

#if ( 201711 <= __cpp_impl_three_way_comparison ) && __has_include( <compare> ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR )
#  define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
#endif
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
#  include <compare>
#endif

static_assert( VK_HEADER_VERSION == 180, "Wrong VK_HEADER_VERSION!" );

// 32-bit Vulkan is not type-safe for non-dispatchable handles, so implicit conversions between the
// handle wrapper classes and the native handle types are disabled on such platforms by default.
// To enable these conversions on 32-bit platforms, define VULKAN_HPP_TYPESAFE_CONVERSION.
#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
#  if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
#    define VULKAN_HPP_TYPESAFE_CONVERSION
#  endif
#endif
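
// Illustrative sketch (not generated code): opting in to implicit handle conversions on a
// 32-bit build, before including this header:
//
//   #define VULKAN_HPP_TYPESAFE_CONVERSION
//   #include <vulkan/vulkan.hpp>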

// On some systems <tuple> pulls in <sys/sysmacros.h> through another header, which defines
// major(x) and minor(x) as macros expanding to gnu_dev_major(x) / gnu_dev_minor(x).
// That breaks members named 'major' and 'minor' in constructor initializer lists, so undefine them.
#if defined( major )
#  undef major
#endif
#if defined( minor )
#  undef minor
#endif

// Windows defines MemoryBarrier which is deprecated and collides
// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct.
#if defined( MemoryBarrier )
#  undef MemoryBarrier
#endif

#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS )
#  if defined( __clang__ )
#    if __has_feature( cxx_unrestricted_unions )
#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
#    endif
#  elif defined( __GNUC__ )
#    define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ )
#    if 40600 <= GCC_VERSION
#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
#    endif
#  elif defined( _MSC_VER )
#    if 1900 <= _MSC_VER
#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
#    endif
#  endif
#endif

#if !defined( VULKAN_HPP_INLINE )
#  if defined( __clang__ )
#    if __has_attribute( always_inline )
#      define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
#    else
#      define VULKAN_HPP_INLINE inline
#    endif
#  elif defined( __GNUC__ )
#    define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
#  elif defined( _MSC_VER )
#    define VULKAN_HPP_INLINE inline
#  else
#    define VULKAN_HPP_INLINE inline
#  endif
#endif

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
#  define VULKAN_HPP_TYPESAFE_EXPLICIT
#else
#  define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
#endif

#if defined( __cpp_constexpr )
#  define VULKAN_HPP_CONSTEXPR constexpr
#  if __cpp_constexpr >= 201304
#    define VULKAN_HPP_CONSTEXPR_14 constexpr
#  else
#    define VULKAN_HPP_CONSTEXPR_14
#  endif
#  define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr
#else
#  define VULKAN_HPP_CONSTEXPR
#  define VULKAN_HPP_CONSTEXPR_14
#  define VULKAN_HPP_CONST_OR_CONSTEXPR const
#endif

#if !defined( VULKAN_HPP_NOEXCEPT )
#  if defined( _MSC_VER ) && ( _MSC_VER <= 1800 )
#    define VULKAN_HPP_NOEXCEPT
#  else
#    define VULKAN_HPP_NOEXCEPT noexcept
#    define VULKAN_HPP_HAS_NOEXCEPT 1
#    if defined( VULKAN_HPP_NO_EXCEPTIONS )
#      define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept
#    else
#      define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
#    endif
#  endif
#endif

#if 14 <= VULKAN_HPP_CPP_VERSION
#  define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]]
#else
#  define VULKAN_HPP_DEPRECATED( msg )
#endif

#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS )
#  define VULKAN_HPP_NODISCARD [[nodiscard]]
#  if defined( VULKAN_HPP_NO_EXCEPTIONS )
#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]]
#  else
#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
#  endif
#else
#  define VULKAN_HPP_NODISCARD
#  define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
#endif

#if !defined( VULKAN_HPP_NAMESPACE )
#  define VULKAN_HPP_NAMESPACE vk
#endif

#define VULKAN_HPP_STRINGIFY2( text ) #text
#define VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text )
#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE )
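
// Illustrative sketch (not generated code): everything below can be compiled into a custom
// namespace instead of 'vk' by defining VULKAN_HPP_NAMESPACE before including this header:
//
//   #define VULKAN_HPP_NAMESPACE my_engine_vk
//   #include <vulkan/vulkan.hpp>
//
//   my_engine_vk::ApplicationInfo appInfo;   // same API, different namespace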

namespace VULKAN_HPP_NAMESPACE
{
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  template <typename T>
  class ArrayProxy
  {
  public:
    VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT
      : m_count( 0 )
      , m_ptr( nullptr )
    {}

    VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
      : m_count( 0 )
      , m_ptr( nullptr )
    {}

    ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT
      : m_count( 1 )
      , m_ptr( &value )
    {}

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxy( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
      : m_count( 1 )
      , m_ptr( &value )
    {}

    ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
      : m_count( count )
      , m_ptr( ptr )
    {}

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxy( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
      : m_count( count )
      , m_ptr( ptr )
    {}

#  if __GNUC__ >= 9
#    pragma GCC diagnostic push
#    pragma GCC diagnostic ignored "-Winit-list-lifetime"
#  endif

    ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( list.size() ) )
      , m_ptr( list.begin() )
    {}

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( list.size() ) )
      , m_ptr( list.begin() )
    {}

    ArrayProxy( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( list.size() ) )
      , m_ptr( list.begin() )
    {}

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( list.size() ) )
      , m_ptr( list.begin() )
    {}

#  if __GNUC__ >= 9
#    pragma GCC diagnostic pop
#  endif

    template <size_t N>
    ArrayProxy( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
      : m_count( N )
      , m_ptr( data.data() )
    {}

    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxy( std::array<typename std::remove_const<T>::type, N> const & data ) VULKAN_HPP_NOEXCEPT
      : m_count( N )
      , m_ptr( data.data() )
    {}

    template <size_t N>
    ArrayProxy( std::array<T, N> & data ) VULKAN_HPP_NOEXCEPT
      : m_count( N )
      , m_ptr( data.data() )
    {}

    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxy( std::array<typename std::remove_const<T>::type, N> & data ) VULKAN_HPP_NOEXCEPT
      : m_count( N )
      , m_ptr( data.data() )
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxy( std::vector<T, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( data.size() ) )
      , m_ptr( data.data() )
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
              typename B      = T,
              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxy( std::vector<typename std::remove_const<T>::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( data.size() ) )
      , m_ptr( data.data() )
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxy( std::vector<T, Allocator> & data ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( data.size() ) )
      , m_ptr( data.data() )
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
              typename B      = T,
              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxy( std::vector<typename std::remove_const<T>::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( data.size() ) )
      , m_ptr( data.data() )
    {}

    const T * begin() const VULKAN_HPP_NOEXCEPT
    {
      return m_ptr;
    }

    const T * end() const VULKAN_HPP_NOEXCEPT
    {
      return m_ptr + m_count;
    }

    const T & front() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( m_count && m_ptr );
      return *m_ptr;
    }

    const T & back() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( m_count && m_ptr );
      return *( m_ptr + m_count - 1 );
    }

    bool empty() const VULKAN_HPP_NOEXCEPT
    {
      return ( m_count == 0 );
    }

    uint32_t size() const VULKAN_HPP_NOEXCEPT
    {
      return m_count;
    }

    T * data() const VULKAN_HPP_NOEXCEPT
    {
      return m_ptr;
    }

  private:
    uint32_t m_count;
    T *      m_ptr;
  };
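
  // Illustrative sketch (not generated code): ArrayProxy is how the enhanced-mode API accepts
  // "array-like" arguments. A single element, a braced initializer list, a std::array, or a
  // std::vector all convert implicitly to ArrayProxy<const T> without copying the elements.
  // The example assumes the enhanced-mode vk::CommandBuffer::setScissor overload and the
  // vk::Rect2D struct declared further down this header:
  //
  //   void recordScissors( VULKAN_HPP_NAMESPACE::CommandBuffer cmd )
  //   {
  //     VULKAN_HPP_NAMESPACE::Rect2D              single( { 0, 0 }, { 640, 480 } );
  //     std::vector<VULKAN_HPP_NAMESPACE::Rect2D> many( 4, single );
  //
  //     cmd.setScissor( 0, single );               // one element
  //     cmd.setScissor( 0, { single, single } );   // initializer list
  //     cmd.setScissor( 0, many );                 // std::vector
  //   }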

  template <typename T>
  class ArrayProxyNoTemporaries
  {
  public:
    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT
      : m_count( 0 )
      , m_ptr( nullptr )
    {}

    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
      : m_count( 0 )
      , m_ptr( nullptr )
    {}

    ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT
      : m_count( 1 )
      , m_ptr( &value )
    {}

    ArrayProxyNoTemporaries( T && value ) = delete;

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
      : m_count( 1 )
      , m_ptr( &value )
    {}

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( typename std::remove_const<T>::type && value ) = delete;

    ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
      : m_count( count )
      , m_ptr( ptr )
    {}

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
      : m_count( count )
      , m_ptr( ptr )
    {}

    ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( list.size() ) )
      , m_ptr( list.begin() )
    {}

    ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) = delete;

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list )
      VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( list.size() ) )
      , m_ptr( list.begin() )
    {}

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const && list ) = delete;

    ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( list.size() ) )
      , m_ptr( list.begin() )
    {}

    ArrayProxyNoTemporaries( std::initializer_list<T> && list ) = delete;

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( list.size() ) )
      , m_ptr( list.begin() )
    {}

    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> && list ) = delete;

    template <size_t N>
    ArrayProxyNoTemporaries( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
      : m_count( N )
      , m_ptr( data.data() )
    {}

    template <size_t N>
    ArrayProxyNoTemporaries( std::array<T, N> const && data ) = delete;

    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> const & data ) VULKAN_HPP_NOEXCEPT
      : m_count( N )
      , m_ptr( data.data() )
    {}

    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> const && data ) = delete;

    template <size_t N>
    ArrayProxyNoTemporaries( std::array<T, N> & data ) VULKAN_HPP_NOEXCEPT
      : m_count( N )
      , m_ptr( data.data() )
    {}

    template <size_t N>
    ArrayProxyNoTemporaries( std::array<T, N> && data ) = delete;

    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> & data ) VULKAN_HPP_NOEXCEPT
      : m_count( N )
      , m_ptr( data.data() )
    {}

    template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> && data ) = delete;

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxyNoTemporaries( std::vector<T, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( data.size() ) )
      , m_ptr( data.data() )
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxyNoTemporaries( std::vector<T, Allocator> const && data ) = delete;

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
              typename B      = T,
              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> const & data )
      VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( data.size() ) )
      , m_ptr( data.data() )
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
              typename B      = T,
              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> const && data ) = delete;

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxyNoTemporaries( std::vector<T, Allocator> & data ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( data.size() ) )
      , m_ptr( data.data() )
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxyNoTemporaries( std::vector<T, Allocator> && data ) = delete;

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
              typename B      = T,
              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT
      : m_count( static_cast<uint32_t>( data.size() ) )
      , m_ptr( data.data() )
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
              typename B      = T,
              typename std::enable_if<std::is_const<B>::value, int>::type = 0>
    ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> && data ) = delete;

    const T * begin() const VULKAN_HPP_NOEXCEPT
    {
      return m_ptr;
    }

    const T * end() const VULKAN_HPP_NOEXCEPT
    {
      return m_ptr + m_count;
    }

    const T & front() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( m_count && m_ptr );
      return *m_ptr;
    }

    const T & back() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( m_count && m_ptr );
      return *( m_ptr + m_count - 1 );
    }

    bool empty() const VULKAN_HPP_NOEXCEPT
    {
      return ( m_count == 0 );
    }

    uint32_t size() const VULKAN_HPP_NOEXCEPT
    {
      return m_count;
    }

    T * data() const VULKAN_HPP_NOEXCEPT
    {
      return m_ptr;
    }

  private:
    uint32_t m_count;
    T *      m_ptr;
  };
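
  // Illustrative sketch (not generated code): ArrayProxyNoTemporaries provides the same view as
  // ArrayProxy, but every constructor taking an rvalue is deleted, so the referenced data must
  // outlive the call and temporaries are rejected at compile time. 'consume' is a hypothetical
  // helper used only for illustration:
  //
  //   void consume( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int> values );
  //
  //   std::vector<int> v{ 1, 2, 3 };
  //   consume( v );                                 // fine: the lvalue outlives the call
  //   // consume( std::vector<int>{ 1, 2, 3 } );    // would not compile: deleted rvalue overload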
#endif

  template <typename T, size_t N>
  class ArrayWrapper1D : public std::array<T, N>
  {
  public:
    VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array<T, N>() {}

    VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT : std::array<T, N>( data )
    {}

#if defined( _WIN32 ) && !defined( _WIN64 )
    VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT
    {
      return std::array<T, N>::operator[]( index );
    }

    T & operator[]( int index ) VULKAN_HPP_NOEXCEPT
    {
      return std::array<T, N>::operator[]( index );
    }
#endif

    operator T const *() const VULKAN_HPP_NOEXCEPT
    {
      return this->data();
    }

    operator T *() VULKAN_HPP_NOEXCEPT
    {
      return this->data();
    }

    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
    operator std::string() const
    {
      return std::string( this->data() );
    }

#if 17 <= VULKAN_HPP_CPP_VERSION
    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
    operator std::string_view() const
    {
      return std::string_view( this->data() );
    }
#endif

    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
    bool operator<( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return *static_cast<std::array<char, N> const *>( this ) < *static_cast<std::array<char, N> const *>( &rhs );
    }

    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
    bool operator<=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return *static_cast<std::array<char, N> const *>( this ) <= *static_cast<std::array<char, N> const *>( &rhs );
    }

    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
    bool operator>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return *static_cast<std::array<char, N> const *>( this ) > *static_cast<std::array<char, N> const *>( &rhs );
    }

    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
    bool operator>=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return *static_cast<std::array<char, N> const *>( this ) >= *static_cast<std::array<char, N> const *>( &rhs );
    }

    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
    bool operator==( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return *static_cast<std::array<char, N> const *>( this ) == *static_cast<std::array<char, N> const *>( &rhs );
    }

    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
    bool operator!=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs );
    }
  };

  // relational operators between std::string and arrays of chars
  template <size_t N>
  bool operator<( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    return lhs < rhs.data();
  }

  template <size_t N>
  bool operator<=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    return lhs <= rhs.data();
  }

  template <size_t N>
  bool operator>( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    return lhs > rhs.data();
  }

  template <size_t N>
  bool operator>=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    return lhs >= rhs.data();
  }

  template <size_t N>
  bool operator==( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    return lhs == rhs.data();
  }

  template <size_t N>
  bool operator!=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    return lhs != rhs.data();
  }
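
  // Illustrative sketch (not generated code): ArrayWrapper1D wraps the fixed-size array members of
  // the generated structs (for example the char arrays holding device, layer, and extension names)
  // and adds the pointer, std::string, and std::string_view conversions plus the comparisons above.
  // The example assumes the vk::PhysicalDeviceProperties wrapper declared further down this header:
  //
  //   bool isMyGpu( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & props )
  //   {
  //     std::string name = props.deviceName;                  // ArrayWrapper1D<char, N> -> std::string
  //     return std::string( "MyGPU" ) == props.deviceName;    // free operator== above
  //   }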

  template <typename T, size_t N, size_t M>
  class ArrayWrapper2D : public std::array<ArrayWrapper1D<T, M>, N>
  {
  public:
    VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT : std::array<ArrayWrapper1D<T, M>, N>() {}

    VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array<std::array<T, M>, N> const & data ) VULKAN_HPP_NOEXCEPT
      : std::array<ArrayWrapper1D<T, M>, N>( *reinterpret_cast<std::array<ArrayWrapper1D<T, M>, N> const *>( &data ) )
    {}
  };

  template <typename FlagBitsType>
  struct FlagTraits
  {
    enum
    {
      allFlags = 0
    };
  };

  template <typename BitType>
  class Flags
  {
  public:
    using MaskType = typename std::underlying_type<BitType>::type;

    // constructors
    VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask( 0 ) {}

    VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast<MaskType>( bit ) ) {}

    VULKAN_HPP_CONSTEXPR Flags( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT : m_mask( flags ) {}

    // relational operators
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Flags<BitType> const & ) const = default;
#else
    VULKAN_HPP_CONSTEXPR bool operator<( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_mask < rhs.m_mask;
    }

    VULKAN_HPP_CONSTEXPR bool operator<=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_mask <= rhs.m_mask;
    }

    VULKAN_HPP_CONSTEXPR bool operator>( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_mask > rhs.m_mask;
    }

    VULKAN_HPP_CONSTEXPR bool operator>=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_mask >= rhs.m_mask;
    }

    VULKAN_HPP_CONSTEXPR bool operator==( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_mask == rhs.m_mask;
    }

    VULKAN_HPP_CONSTEXPR bool operator!=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_mask != rhs.m_mask;
    }
#endif

    // logical operator
    VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return !m_mask;
    }

    // bitwise operators
    VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return Flags<BitType>( m_mask & rhs.m_mask );
    }

    VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return Flags<BitType>( m_mask | rhs.m_mask );
    }

    VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return Flags<BitType>( m_mask ^ rhs.m_mask );
    }

    VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
    {
      return Flags<BitType>( m_mask ^ FlagTraits<BitType>::allFlags );
    }

    // assignment operators
    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator|=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      m_mask |= rhs.m_mask;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator&=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      m_mask &= rhs.m_mask;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator^=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      m_mask ^= rhs.m_mask;
      return *this;
    }

    // cast operators
    explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return !!m_mask;
    }

    explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT
    {
      return m_mask;
    }

#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC )
  public:
#else
  private:
#endif
    MaskType m_mask;
  };

#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
  // relational operators (only needed for pre-C++20)
  template <typename BitType>
  VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
  {
    return flags.operator>( bit );
  }

  template <typename BitType>
  VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
  {
    return flags.operator>=( bit );
  }

  template <typename BitType>
  VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
  {
    return flags.operator<( bit );
  }

  template <typename BitType>
  VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
  {
    return flags.operator<=( bit );
  }

  template <typename BitType>
  VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
  {
    return flags.operator==( bit );
  }

  template <typename BitType>
  VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
  {
    return flags.operator!=( bit );
  }
#endif

  // bitwise operators
  template <typename BitType>
  VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
  {
    return flags.operator&( bit );
  }

  template <typename BitType>
  VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
  {
    return flags.operator|( bit );
  }

  template <typename BitType>
  VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
  {
    return flags.operator^( bit );
  }
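
  // Illustrative sketch (not generated code): Flags<BitType> is the type-safe bitmask behind all
  // Vk*Flags types. Bits combine with operator|, are tested with operator& plus the explicit bool
  // conversion, and are cleared via operator~ and operator&=. The example assumes the
  // vk::ImageUsageFlagBits enum and its generated operators declared further down this header:
  //
  //   using VULKAN_HPP_NAMESPACE::ImageUsageFlagBits;
  //   VULKAN_HPP_NAMESPACE::ImageUsageFlags usage =
  //     ImageUsageFlagBits::eColorAttachment | ImageUsageFlagBits::eTransferSrc;
  //   if ( usage & ImageUsageFlagBits::eTransferSrc )
  //   {
  //     usage &= ~ImageUsageFlagBits::eTransferSrc;   // clear the bit again
  //   }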

  template <typename RefType>
  class Optional
  {
  public:
    Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT
    {
      m_ptr = &reference;
    }
    Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT
    {
      m_ptr = ptr;
    }
    Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_ptr = nullptr;
    }

    operator RefType *() const VULKAN_HPP_NOEXCEPT
    {
      return m_ptr;
    }
    RefType const * operator->() const VULKAN_HPP_NOEXCEPT
    {
      return m_ptr;
    }
    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return !!m_ptr;
    }

  private:
    RefType * m_ptr;
  };
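
  // Illustrative sketch (not generated code): Optional<RefType> models "reference or nothing"
  // parameters (most prominently the optional vk::AllocationCallbacks), so callers can pass a
  // reference, a pointer, or nullptr. 'createWithAllocator' and 'getMyCallbacks' are hypothetical
  // helpers used only for illustration:
  //
  //   void createWithAllocator(
  //     VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator );
  //
  //   VULKAN_HPP_NAMESPACE::AllocationCallbacks callbacks = getMyCallbacks();
  //   createWithAllocator( callbacks );   // from a reference
  //   createWithAllocator( nullptr );     // explicitly "no allocator"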

  template <typename X, typename Y>
  struct StructExtends
  {
    enum
    {
      value = false
    };
  };

  template <typename Type, class...>
  struct IsPartOfStructureChain
  {
    static const bool valid = false;
  };

  template <typename Type, typename Head, typename... Tail>
  struct IsPartOfStructureChain<Type, Head, Tail...>
  {
    static const bool valid = std::is_same<Type, Head>::value || IsPartOfStructureChain<Type, Tail...>::valid;
  };

  template <size_t Index, typename T, typename... ChainElements>
  struct StructureChainContains
  {
    static const bool value =
      std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
      StructureChainContains<Index - 1, T, ChainElements...>::value;
  };

  template <typename T, typename... ChainElements>
  struct StructureChainContains<0, T, ChainElements...>
  {
    static const bool value =
      std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
  };

  template <size_t Index, typename... ChainElements>
  struct StructureChainValidation
  {
    using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
    static const bool valid =
      StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
      ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) &&
      StructureChainValidation<Index - 1, ChainElements...>::valid;
  };

  template <typename... ChainElements>
  struct StructureChainValidation<0, ChainElements...>
  {
    static const bool valid = true;
  };

  template <typename... ChainElements>
  class StructureChain : public std::tuple<ChainElements...>
  {
  public:
    StructureChain() VULKAN_HPP_NOEXCEPT
    {
      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
                     "The structure chain is not valid!" );
      link<sizeof...( ChainElements ) - 1>();
    }

    StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs )
    {
      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
                     "The structure chain is not valid!" );
      link<sizeof...( ChainElements ) - 1>();
    }

    StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT
      : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) )
    {
      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
                     "The structure chain is not valid!" );
      link<sizeof...( ChainElements ) - 1>();
    }

    StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... )
    {
      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
                     "The structure chain is not valid!" );
      link<sizeof...( ChainElements ) - 1>();
    }

    StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      std::tuple<ChainElements...>::operator=( rhs );
      link<sizeof...( ChainElements ) - 1>();
      return *this;
    }

    StructureChain & operator=( StructureChain && rhs ) = delete;

    template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
    T & get() VULKAN_HPP_NOEXCEPT
    {
      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(
        static_cast<std::tuple<ChainElements...> &>( *this ) );
    }

    template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
    T const & get() const VULKAN_HPP_NOEXCEPT
    {
      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(
        static_cast<std::tuple<ChainElements...> const &>( *this ) );
    }

    template <typename T0, typename T1, typename... Ts>
    std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT
    {
      return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
    }

    template <typename T0, typename T1, typename... Ts>
    std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
    }

    template <typename ClassType, size_t Which = 0>
    typename std::enable_if<
      std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
        ( Which == 0 ),
      bool>::type
      isLinked() const VULKAN_HPP_NOEXCEPT
    {
      return true;
    }

    template <typename ClassType, size_t Which = 0>
    typename std::enable_if<
      !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
        ( Which != 0 ),
      bool>::type
      isLinked() const VULKAN_HPP_NOEXCEPT
    {
      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
                     "Can't unlink Structure that's not part of this StructureChain!" );
      return isLinked( reinterpret_cast<VkBaseInStructure const *>( &get<ClassType, Which>() ) );
    }

    template <typename ClassType, size_t Which = 0>
    typename std::enable_if<
      !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
        ( Which != 0 ),
      void>::type
      relink() VULKAN_HPP_NOEXCEPT
    {
      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
                     "Can't relink Structure that's not part of this StructureChain!" );
      auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() );
      VULKAN_HPP_ASSERT( !isLinked( pNext ) );
      auto & headElement = std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) );
      pNext->pNext       = reinterpret_cast<VkBaseInStructure const *>( headElement.pNext );
      headElement.pNext  = pNext;
    }

    template <typename ClassType, size_t Which = 0>
    typename std::enable_if<
      !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
        ( Which != 0 ),
      void>::type
      unlink() VULKAN_HPP_NOEXCEPT
    {
      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
                     "Can't unlink Structure that's not part of this StructureChain!" );
      unlink( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) );
    }

  private:
    template <int Index, typename T, int Which, typename, class First, class... Types>
    struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...>
    {};

    template <int Index, typename T, int Which, class First, class... Types>
    struct ChainElementIndex<Index,
                             T,
                             Which,
                             typename std::enable_if<!std::is_same<T, First>::value, void>::type,
                             First,
                             Types...> : ChainElementIndex<Index + 1, T, Which, void, Types...>
    {};

    template <int Index, typename T, int Which, class First, class... Types>
    struct ChainElementIndex<Index,
                             T,
                             Which,
                             typename std::enable_if<std::is_same<T, First>::value, void>::type,
                             First,
                             Types...> : ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
    {};

    template <int Index, typename T, class First, class... Types>
    struct ChainElementIndex<Index,
                             T,
                             0,
                             typename std::enable_if<std::is_same<T, First>::value, void>::type,
                             First,
                             Types...> : std::integral_constant<int, Index>
    {};

    bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT
    {
      VkBaseInStructure const * elementPtr = reinterpret_cast<VkBaseInStructure const *>(
        &std::get<0>( static_cast<std::tuple<ChainElements...> const &>( *this ) ) );
      while ( elementPtr )
      {
        if ( elementPtr->pNext == pNext )
        {
          return true;
        }
        elementPtr = elementPtr->pNext;
      }
      return false;
    }

    template <size_t Index>
    typename std::enable_if<Index != 0, void>::type link() VULKAN_HPP_NOEXCEPT
    {
      auto & x = std::get<Index - 1>( static_cast<std::tuple<ChainElements...> &>( *this ) );
      x.pNext  = &std::get<Index>( static_cast<std::tuple<ChainElements...> &>( *this ) );
      link<Index - 1>();
    }

    template <size_t Index>
    typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT
    {}

    void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
    {
      VkBaseOutStructure * elementPtr =
        reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ) );
      while ( elementPtr && ( elementPtr->pNext != pNext ) )
      {
        elementPtr = elementPtr->pNext;
      }
      if ( elementPtr )
      {
        elementPtr->pNext = pNext->pNext;
      }
      else
      {
        VULKAN_HPP_ASSERT( false );  // fires if the ClassType member has already been unlinked!
      }
    }
  };
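
  // Illustrative sketch (not generated code): StructureChain builds a pNext chain that is validated
  // at compile time. The first element is the base structure; every further element must be declared
  // (via a StructExtends specialization) to extend it, and the constructors wire up the pNext
  // pointers. The example assumes the vk::PhysicalDeviceFeatures2 and vk::PhysicalDeviceVulkan12Features
  // wrappers, their StructExtends specializations, and vk::PhysicalDevice::getFeatures2, all declared
  // further down this header, plus an already retrieved vk::PhysicalDevice 'physicalDevice':
  //
  //   VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                        VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>
  //     chain;
  //   physicalDevice.getFeatures2( &chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>() );
  //   if ( chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>().timelineSemaphore )
  //   {
  //     // timeline semaphores are supported
  //   }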

#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
  template <typename Type, typename Dispatch>
  class UniqueHandleTraits;

  template <typename Type, typename Dispatch>
  class UniqueHandle : public UniqueHandleTraits<Type, Dispatch>::deleter
  {
  private:
    using Deleter = typename UniqueHandleTraits<Type, Dispatch>::deleter;

  public:
    using element_type = Type;

    UniqueHandle() : Deleter(), m_value() {}

    explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
      : Deleter( deleter )
      , m_value( value )
    {}

    UniqueHandle( UniqueHandle const & ) = delete;

    UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
      : Deleter( std::move( static_cast<Deleter &>( other ) ) )
      , m_value( other.release() )
    {}

    ~UniqueHandle() VULKAN_HPP_NOEXCEPT
    {
      if ( m_value )
      {
        this->destroy( m_value );
      }
    }

    UniqueHandle & operator=( UniqueHandle const & ) = delete;

    UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
    {
      reset( other.release() );
      *static_cast<Deleter *>( this ) = std::move( static_cast<Deleter &>( other ) );
      return *this;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_value.operator bool();
    }

    Type const * operator->() const VULKAN_HPP_NOEXCEPT
    {
      return &m_value;
    }

    Type * operator->() VULKAN_HPP_NOEXCEPT
    {
      return &m_value;
    }

    Type const & operator*() const VULKAN_HPP_NOEXCEPT
    {
      return m_value;
    }

    Type & operator*() VULKAN_HPP_NOEXCEPT
    {
      return m_value;
    }

    const Type & get() const VULKAN_HPP_NOEXCEPT
    {
      return m_value;
    }

    Type & get() VULKAN_HPP_NOEXCEPT
    {
      return m_value;
    }

    void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT
    {
      if ( m_value != value )
      {
        if ( m_value )
        {
          this->destroy( m_value );
        }
        m_value = value;
      }
    }

    Type release() VULKAN_HPP_NOEXCEPT
    {
      Type value = m_value;
      m_value    = nullptr;
      return value;
    }

    void swap( UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
    {
      std::swap( m_value, rhs.m_value );
      std::swap( static_cast<Deleter &>( *this ), static_cast<Deleter &>( rhs ) );
    }

  private:
    Type m_value;
  };

  template <typename UniqueType>
  VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type>
                    uniqueToRaw( std::vector<UniqueType> const & handles )
  {
    std::vector<typename UniqueType::element_type> newBuffer( handles.size() );
    std::transform(
      handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } );
    return newBuffer;
  }

  template <typename Type, typename Dispatch>
  VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs,
                               UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
  {
    lhs.swap( rhs );
  }
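
  // Illustrative sketch (not generated code): UniqueHandle gives unique_ptr-style ownership of a
  // Vulkan handle; the deleter stored in the base class destroys the handle when the UniqueHandle
  // goes out of scope, and uniqueToRaw() copies the raw handles out of a vector of unique ones.
  // The example assumes exceptions are enabled and that the enhanced-mode
  // vk::Device::createSemaphoreUnique function, the vk::SemaphoreCreateInfo struct, and an existing
  // vk::Device 'device' are available (all declared further down this header):
  //
  //   {
  //     VULKAN_HPP_NAMESPACE::UniqueSemaphore semaphore =
  //       device.createSemaphoreUnique( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo{} );
  //     VULKAN_HPP_NAMESPACE::Semaphore raw = semaphore.get();   // borrow without taking ownership
  //   }  // vkDestroySemaphore is called here, when 'semaphore' leaves scope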
#endif

#if !defined( VK_NO_PROTOTYPES )
  class DispatchLoaderStatic
  {
  public:
    //=== VK_VERSION_1_0 ===

    VkResult vkCreateInstance( const VkInstanceCreateInfo *  pCreateInfo,
                               const VkAllocationCallbacks * pAllocator,
                               VkInstance *                  pInstance ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
    }

    void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyInstance( instance, pAllocator );
    }

    VkResult vkEnumeratePhysicalDevices( VkInstance         instance,
                                         uint32_t *         pPhysicalDeviceCount,
                                         VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
    }

    void vkGetPhysicalDeviceFeatures( VkPhysicalDevice           physicalDevice,
                                      VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
    }

    void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice     physicalDevice,
                                              VkFormat             format,
                                              VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
    }

    VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice          physicalDevice,
                                                       VkFormat                  format,
                                                       VkImageType               type,
                                                       VkImageTiling             tiling,
                                                       VkImageUsageFlags         usage,
                                                       VkImageCreateFlags        flags,
                                                       VkImageFormatProperties * pImageFormatProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceImageFormatProperties(
        physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
    }

    void vkGetPhysicalDeviceProperties( VkPhysicalDevice             physicalDevice,
                                        VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
    }

    void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice          physicalDevice,
                                                   uint32_t *                pQueueFamilyPropertyCount,
                                                   VkQueueFamilyProperties * pQueueFamilyProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceQueueFamilyProperties(
        physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
    }

    void vkGetPhysicalDeviceMemoryProperties(
      VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
    }

    PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetInstanceProcAddr( instance, pName );
    }

    PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceProcAddr( device, pName );
    }

    VkResult vkCreateDevice( VkPhysicalDevice              physicalDevice,
                             const VkDeviceCreateInfo *    pCreateInfo,
                             const VkAllocationCallbacks * pAllocator,
                             VkDevice *                    pDevice ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
    }

    void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyDevice( device, pAllocator );
    }

    VkResult vkEnumerateInstanceExtensionProperties( const char *            pLayerName,
                                                     uint32_t *              pPropertyCount,
                                                     VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties );
    }

    VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice        physicalDevice,
                                                   const char *            pLayerName,
                                                   uint32_t *              pPropertyCount,
                                                   VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
    }

    VkResult vkEnumerateInstanceLayerProperties( uint32_t *          pPropertyCount,
                                                 VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
    }

    VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice    physicalDevice,
                                               uint32_t *          pPropertyCount,
                                               VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
    }

    void vkGetDeviceQueue( VkDevice  device,
                           uint32_t  queueFamilyIndex,
                           uint32_t  queueIndex,
                           VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue );
    }

    VkResult vkQueueSubmit( VkQueue              queue,
                            uint32_t             submitCount,
                            const VkSubmitInfo * pSubmits,
                            VkFence              fence ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
    }

    VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueueWaitIdle( queue );
    }

    VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDeviceWaitIdle( device );
    }

    VkResult vkAllocateMemory( VkDevice                      device,
                               const VkMemoryAllocateInfo *  pAllocateInfo,
                               const VkAllocationCallbacks * pAllocator,
                               VkDeviceMemory *              pMemory ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
    }

    void vkFreeMemory( VkDevice                      device,
                       VkDeviceMemory                memory,
                       const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkFreeMemory( device, memory, pAllocator );
    }

    VkResult vkMapMemory( VkDevice         device,
                          VkDeviceMemory   memory,
                          VkDeviceSize     offset,
                          VkDeviceSize     size,
                          VkMemoryMapFlags flags,
                          void **          ppData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkMapMemory( device, memory, offset, size, flags, ppData );
    }

    void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkUnmapMemory( device, memory );
    }

    VkResult vkFlushMappedMemoryRanges( VkDevice                    device,
                                        uint32_t                    memoryRangeCount,
                                        const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
    }

    VkResult vkInvalidateMappedMemoryRanges( VkDevice                    device,
                                             uint32_t                    memoryRangeCount,
                                             const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
    }

    void vkGetDeviceMemoryCommitment( VkDevice       device,
                                      VkDeviceMemory memory,
                                      VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
    }

    VkResult vkBindBufferMemory( VkDevice       device,
                                 VkBuffer       buffer,
                                 VkDeviceMemory memory,
                                 VkDeviceSize   memoryOffset ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
    }

    VkResult vkBindImageMemory( VkDevice       device,
                                VkImage        image,
                                VkDeviceMemory memory,
                                VkDeviceSize   memoryOffset ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindImageMemory( device, image, memory, memoryOffset );
    }

    void vkGetBufferMemoryRequirements( VkDevice               device,
                                        VkBuffer               buffer,
                                        VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
    }

    void vkGetImageMemoryRequirements( VkDevice               device,
                                       VkImage                image,
                                       VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements );
    }

    void vkGetImageSparseMemoryRequirements( VkDevice                          device,
                                             VkImage                           image,
                                             uint32_t *                        pSparseMemoryRequirementCount,
                                             VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageSparseMemoryRequirements(
        device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
    }

    void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice                physicalDevice,
                                                         VkFormat                        format,
                                                         VkImageType                     type,
                                                         VkSampleCountFlagBits           samples,
                                                         VkImageUsageFlags               usage,
                                                         VkImageTiling                   tiling,
                                                         uint32_t *                      pPropertyCount,
                                                         VkSparseImageFormatProperties * pProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSparseImageFormatProperties(
        physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
    }

    VkResult vkQueueBindSparse( VkQueue                  queue,
                                uint32_t                 bindInfoCount,
                                const VkBindSparseInfo * pBindInfo,
                                VkFence                  fence ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
    }

    VkResult vkCreateFence( VkDevice                      device,
                            const VkFenceCreateInfo *     pCreateInfo,
                            const VkAllocationCallbacks * pAllocator,
                            VkFence *                     pFence ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
    }

    void vkDestroyFence( VkDevice                      device,
                         VkFence                       fence,
                         const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyFence( device, fence, pAllocator );
    }

    VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkResetFences( device, fenceCount, pFences );
    }

    VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetFenceStatus( device, fence );
    }

    VkResult vkWaitForFences( VkDevice        device,
                              uint32_t        fenceCount,
                              const VkFence * pFences,
                              VkBool32        waitAll,
                              uint64_t        timeout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
    }

    VkResult vkCreateSemaphore( VkDevice                      device,
                                const VkSemaphoreCreateInfo * pCreateInfo,
                                const VkAllocationCallbacks * pAllocator,
                                VkSemaphore *                 pSemaphore ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore );
    }

    void vkDestroySemaphore( VkDevice                      device,
                             VkSemaphore                   semaphore,
                             const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroySemaphore( device, semaphore, pAllocator );
    }

    VkResult vkCreateEvent( VkDevice                      device,
                            const VkEventCreateInfo *     pCreateInfo,
                            const VkAllocationCallbacks * pAllocator,
                            VkEvent *                     pEvent ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
    }

    void vkDestroyEvent( VkDevice                      device,
                         VkEvent                       event,
                         const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyEvent( device, event, pAllocator );
    }

    VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetEventStatus( device, event );
    }

    VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSetEvent( device, event );
    }

    VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkResetEvent( device, event );
    }

    VkResult vkCreateQueryPool( VkDevice                      device,
                                const VkQueryPoolCreateInfo * pCreateInfo,
                                const VkAllocationCallbacks * pAllocator,
                                VkQueryPool *                 pQueryPool ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
    }

    void vkDestroyQueryPool( VkDevice                      device,
                             VkQueryPool                   queryPool,
                             const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyQueryPool( device, queryPool, pAllocator );
    }

    VkResult vkGetQueryPoolResults( VkDevice           device,
                                    VkQueryPool        queryPool,
                                    uint32_t           firstQuery,
                                    uint32_t           queryCount,
                                    size_t             dataSize,
                                    void *             pData,
                                    VkDeviceSize       stride,
                                    VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
    }

    VkResult vkCreateBuffer( VkDevice                      device,
                             const VkBufferCreateInfo *    pCreateInfo,
                             const VkAllocationCallbacks * pAllocator,
                             VkBuffer *                    pBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
    }

    void vkDestroyBuffer( VkDevice                      device,
                          VkBuffer                      buffer,
                          const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyBuffer( device, buffer, pAllocator );
    }

    VkResult vkCreateBufferView( VkDevice                       device,
                                 const VkBufferViewCreateInfo * pCreateInfo,
                                 const VkAllocationCallbacks *  pAllocator,
                                 VkBufferView *                 pView ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
    }

    void vkDestroyBufferView( VkDevice                      device,
                              VkBufferView                  bufferView,
                              const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyBufferView( device, bufferView, pAllocator );
    }

    VkResult vkCreateImage( VkDevice                      device,
                            const VkImageCreateInfo *     pCreateInfo,
                            const VkAllocationCallbacks * pAllocator,
                            VkImage *                     pImage ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
    }

    void vkDestroyImage( VkDevice                      device,
                         VkImage                       image,
                         const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyImage( device, image, pAllocator );
    }

    void vkGetImageSubresourceLayout( VkDevice                   device,
                                      VkImage                    image,
                                      const VkImageSubresource * pSubresource,
                                      VkSubresourceLayout *      pLayout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout );
    }

    VkResult vkCreateImageView( VkDevice                      device,
                                const VkImageViewCreateInfo * pCreateInfo,
                                const VkAllocationCallbacks * pAllocator,
                                VkImageView *                 pView ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
    }

    void vkDestroyImageView( VkDevice                      device,
                             VkImageView                   imageView,
                             const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyImageView( device, imageView, pAllocator );
    }

    VkResult vkCreateShaderModule( VkDevice                         device,
                                   const VkShaderModuleCreateInfo * pCreateInfo,
                                   const VkAllocationCallbacks *    pAllocator,
                                   VkShaderModule *                 pShaderModule ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule );
    }

    void vkDestroyShaderModule( VkDevice                      device,
                                VkShaderModule                shaderModule,
                                const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
    }

    VkResult vkCreatePipelineCache( VkDevice                          device,
                                    const VkPipelineCacheCreateInfo * pCreateInfo,
                                    const VkAllocationCallbacks *     pAllocator,
                                    VkPipelineCache *                 pPipelineCache ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache );
    }

    void vkDestroyPipelineCache( VkDevice                      device,
                                 VkPipelineCache               pipelineCache,
                                 const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator );
    }

    VkResult vkGetPipelineCacheData( VkDevice        device,
                                     VkPipelineCache pipelineCache,
                                     size_t *        pDataSize,
                                     void *          pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
    }

    VkResult vkMergePipelineCaches( VkDevice                device,
                                    VkPipelineCache         dstCache,
                                    uint32_t                srcCacheCount,
                                    const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
    }

    VkResult vkCreateGraphicsPipelines( VkDevice                             device,
                                        VkPipelineCache                      pipelineCache,
                                        uint32_t                             createInfoCount,
                                        const VkGraphicsPipelineCreateInfo * pCreateInfos,
                                        const VkAllocationCallbacks *        pAllocator,
                                        VkPipeline *                         pPipelines ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateGraphicsPipelines(
        device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
    }

    VkResult vkCreateComputePipelines( VkDevice                            device,
                                       VkPipelineCache                     pipelineCache,
                                       uint32_t                            createInfoCount,
                                       const VkComputePipelineCreateInfo * pCreateInfos,
                                       const VkAllocationCallbacks *       pAllocator,
                                       VkPipeline *                        pPipelines ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
    }
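
    // Note: a minimal, illustrative compute pipeline creation using vkCreateComputePipelines
    // (not generated code). Here `d` is an instance of this dispatch class, and `device`,
    // `shaderModule` (a compute shader) and `pipelineLayout` are assumed to be valid objects
    // created elsewhere; error handling is omitted.
    //
    //   VkComputePipelineCreateInfo info{};
    //   info.sType        = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
    //   info.stage.sType  = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    //   info.stage.stage  = VK_SHADER_STAGE_COMPUTE_BIT;
    //   info.stage.module = shaderModule;
    //   info.stage.pName  = "main";
    //   info.layout       = pipelineLayout;
    //   VkPipeline pipeline;
    //   d.vkCreateComputePipelines( device, VK_NULL_HANDLE, 1, &info, nullptr, &pipeline );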

    void vkDestroyPipeline( VkDevice                      device,
                            VkPipeline                    pipeline,
                            const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyPipeline( device, pipeline, pAllocator );
    }

    VkResult vkCreatePipelineLayout( VkDevice                           device,
                                     const VkPipelineLayoutCreateInfo * pCreateInfo,
                                     const VkAllocationCallbacks *      pAllocator,
                                     VkPipelineLayout *                 pPipelineLayout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
    }

    void vkDestroyPipelineLayout( VkDevice                      device,
                                  VkPipelineLayout              pipelineLayout,
                                  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
    }

    VkResult vkCreateSampler( VkDevice                      device,
                              const VkSamplerCreateInfo *   pCreateInfo,
                              const VkAllocationCallbacks * pAllocator,
                              VkSampler *                   pSampler ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler );
    }

    void vkDestroySampler( VkDevice                      device,
                           VkSampler                     sampler,
                           const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroySampler( device, sampler, pAllocator );
    }

    VkResult vkCreateDescriptorSetLayout( VkDevice                                device,
                                          const VkDescriptorSetLayoutCreateInfo * pCreateInfo,
                                          const VkAllocationCallbacks *           pAllocator,
                                          VkDescriptorSetLayout *                 pSetLayout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
    }

    void vkDestroyDescriptorSetLayout( VkDevice                      device,
                                       VkDescriptorSetLayout         descriptorSetLayout,
                                       const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator );
    }

    VkResult vkCreateDescriptorPool( VkDevice                           device,
                                     const VkDescriptorPoolCreateInfo * pCreateInfo,
                                     const VkAllocationCallbacks *      pAllocator,
                                     VkDescriptorPool *                 pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
    }

    void vkDestroyDescriptorPool( VkDevice                      device,
                                  VkDescriptorPool              descriptorPool,
                                  const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
    }

    VkResult vkResetDescriptorPool( VkDevice                   device,
                                    VkDescriptorPool           descriptorPool,
                                    VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkResetDescriptorPool( device, descriptorPool, flags );
    }

    VkResult vkAllocateDescriptorSets( VkDevice                            device,
                                       const VkDescriptorSetAllocateInfo * pAllocateInfo,
                                       VkDescriptorSet *                   pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
    }

    VkResult vkFreeDescriptorSets( VkDevice                device,
                                   VkDescriptorPool        descriptorPool,
                                   uint32_t                descriptorSetCount,
                                   const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets );
    }

    void vkUpdateDescriptorSets( VkDevice                     device,
                                 uint32_t                     descriptorWriteCount,
                                 const VkWriteDescriptorSet * pDescriptorWrites,
                                 uint32_t                     descriptorCopyCount,
                                 const VkCopyDescriptorSet *  pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkUpdateDescriptorSets(
        device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
    }
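
    // Note: a minimal, illustrative descriptor workflow (pool -> allocate -> update) using
    // the wrappers above (not generated code). Here `d` is an instance of this dispatch
    // class; `device`, `layout` (a VkDescriptorSetLayout with a uniform buffer at binding 0)
    // and `buffer` (a VkBuffer created with uniform-buffer usage) are assumed valid.
    //
    //   VkDescriptorPoolSize poolSize{ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1 };
    //   VkDescriptorPoolCreateInfo poolInfo{};
    //   poolInfo.sType         = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    //   poolInfo.maxSets       = 1;
    //   poolInfo.poolSizeCount = 1;
    //   poolInfo.pPoolSizes    = &poolSize;
    //   VkDescriptorPool pool;
    //   d.vkCreateDescriptorPool( device, &poolInfo, nullptr, &pool );
    //
    //   VkDescriptorSetAllocateInfo allocInfo{};
    //   allocInfo.sType              = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    //   allocInfo.descriptorPool     = pool;
    //   allocInfo.descriptorSetCount = 1;
    //   allocInfo.pSetLayouts        = &layout;
    //   VkDescriptorSet set;
    //   d.vkAllocateDescriptorSets( device, &allocInfo, &set );
    //
    //   VkDescriptorBufferInfo bufferInfo{ buffer, 0, VK_WHOLE_SIZE };
    //   VkWriteDescriptorSet write{};
    //   write.sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    //   write.dstSet          = set;
    //   write.dstBinding      = 0;
    //   write.descriptorCount = 1;
    //   write.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    //   write.pBufferInfo     = &bufferInfo;
    //   d.vkUpdateDescriptorSets( device, 1, &write, 0, nullptr );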

    VkResult vkCreateFramebuffer( VkDevice                        device,
                                  const VkFramebufferCreateInfo * pCreateInfo,
                                  const VkAllocationCallbacks *   pAllocator,
                                  VkFramebuffer *                 pFramebuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
    }

    void vkDestroyFramebuffer( VkDevice                      device,
                               VkFramebuffer                 framebuffer,
                               const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyFramebuffer( device, framebuffer, pAllocator );
    }

    VkResult vkCreateRenderPass( VkDevice                       device,
                                 const VkRenderPassCreateInfo * pCreateInfo,
                                 const VkAllocationCallbacks *  pAllocator,
                                 VkRenderPass *                 pRenderPass ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
    }

    void vkDestroyRenderPass( VkDevice                      device,
                              VkRenderPass                  renderPass,
                              const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyRenderPass( device, renderPass, pAllocator );
    }

    void vkGetRenderAreaGranularity( VkDevice     device,
                                     VkRenderPass renderPass,
                                     VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
    }

    VkResult vkCreateCommandPool( VkDevice                        device,
                                  const VkCommandPoolCreateInfo * pCreateInfo,
                                  const VkAllocationCallbacks *   pAllocator,
                                  VkCommandPool *                 pCommandPool ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
    }

    void vkDestroyCommandPool( VkDevice                      device,
                               VkCommandPool                 commandPool,
                               const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyCommandPool( device, commandPool, pAllocator );
    }

    VkResult vkResetCommandPool( VkDevice                device,
                                 VkCommandPool           commandPool,
                                 VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkResetCommandPool( device, commandPool, flags );
    }

    VkResult vkAllocateCommandBuffers( VkDevice                            device,
                                       const VkCommandBufferAllocateInfo * pAllocateInfo,
                                       VkCommandBuffer *                   pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
    }

    void vkFreeCommandBuffers( VkDevice                device,
                               VkCommandPool           commandPool,
                               uint32_t                commandBufferCount,
                               const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers );
    }

    VkResult vkBeginCommandBuffer( VkCommandBuffer                  commandBuffer,
                                   const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo );
    }

    VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkEndCommandBuffer( commandBuffer );
    }

    VkResult vkResetCommandBuffer( VkCommandBuffer           commandBuffer,
                                   VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkResetCommandBuffer( commandBuffer, flags );
    }
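
    // Note: a minimal, illustrative command buffer recording sequence using the wrappers
    // above (not generated code). Here `d` is an instance of this dispatch class, and
    // `device` and `commandPool` are assumed valid; error handling is omitted.
    //
    //   VkCommandBufferAllocateInfo allocInfo{};
    //   allocInfo.sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    //   allocInfo.commandPool        = commandPool;
    //   allocInfo.level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    //   allocInfo.commandBufferCount = 1;
    //   VkCommandBuffer cmd;
    //   d.vkAllocateCommandBuffers( device, &allocInfo, &cmd );
    //
    //   VkCommandBufferBeginInfo beginInfo{};
    //   beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    //   beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    //   d.vkBeginCommandBuffer( cmd, &beginInfo );
    //   // record vkCmd* commands here
    //   d.vkEndCommandBuffer( cmd );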

    void vkCmdBindPipeline( VkCommandBuffer     commandBuffer,
                            VkPipelineBindPoint pipelineBindPoint,
                            VkPipeline          pipeline ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline );
    }

    void vkCmdSetViewport( VkCommandBuffer    commandBuffer,
                           uint32_t           firstViewport,
                           uint32_t           viewportCount,
                           const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports );
    }

    void vkCmdSetScissor( VkCommandBuffer  commandBuffer,
                          uint32_t         firstScissor,
                          uint32_t         scissorCount,
                          const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
    }

    void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetLineWidth( commandBuffer, lineWidth );
    }

    void vkCmdSetDepthBias( VkCommandBuffer commandBuffer,
                            float           depthBiasConstantFactor,
                            float           depthBiasClamp,
                            float           depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
    }

    void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer,
                                 const float     blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
    }

    void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer,
                              float           minDepthBounds,
                              float           maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
    }

    void vkCmdSetStencilCompareMask( VkCommandBuffer    commandBuffer,
                                     VkStencilFaceFlags faceMask,
                                     uint32_t           compareMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
    }

    void vkCmdSetStencilWriteMask( VkCommandBuffer    commandBuffer,
                                   VkStencilFaceFlags faceMask,
                                   uint32_t           writeMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
    }

    void vkCmdSetStencilReference( VkCommandBuffer    commandBuffer,
                                   VkStencilFaceFlags faceMask,
                                   uint32_t           reference ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
    }

    void vkCmdBindDescriptorSets( VkCommandBuffer         commandBuffer,
                                  VkPipelineBindPoint     pipelineBindPoint,
                                  VkPipelineLayout        layout,
                                  uint32_t                firstSet,
                                  uint32_t                descriptorSetCount,
                                  const VkDescriptorSet * pDescriptorSets,
                                  uint32_t                dynamicOffsetCount,
                                  const uint32_t *        pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindDescriptorSets( commandBuffer,
                                        pipelineBindPoint,
                                        layout,
                                        firstSet,
                                        descriptorSetCount,
                                        pDescriptorSets,
                                        dynamicOffsetCount,
                                        pDynamicOffsets );
    }

    void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer,
                               VkBuffer        buffer,
                               VkDeviceSize    offset,
                               VkIndexType     indexType ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType );
    }

    void vkCmdBindVertexBuffers( VkCommandBuffer      commandBuffer,
                                 uint32_t             firstBinding,
                                 uint32_t             bindingCount,
                                 const VkBuffer *     pBuffers,
                                 const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets );
    }

    void vkCmdDraw( VkCommandBuffer commandBuffer,
                    uint32_t        vertexCount,
                    uint32_t        instanceCount,
                    uint32_t        firstVertex,
                    uint32_t        firstInstance ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
    }

    void vkCmdDrawIndexed( VkCommandBuffer commandBuffer,
                           uint32_t        indexCount,
                           uint32_t        instanceCount,
                           uint32_t        firstIndex,
                           int32_t         vertexOffset,
                           uint32_t        firstInstance ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
    }

    void vkCmdDrawIndirect( VkCommandBuffer commandBuffer,
                            VkBuffer        buffer,
                            VkDeviceSize    offset,
                            uint32_t        drawCount,
                            uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride );
    }

    void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer,
                                   VkBuffer        buffer,
                                   VkDeviceSize    offset,
                                   uint32_t        drawCount,
                                   uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
    }

    void vkCmdDispatch( VkCommandBuffer commandBuffer,
                        uint32_t        groupCountX,
                        uint32_t        groupCountY,
                        uint32_t        groupCountZ ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ );
    }

    void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer,
                                VkBuffer        buffer,
                                VkDeviceSize    offset ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset );
    }

    void vkCmdCopyBuffer( VkCommandBuffer      commandBuffer,
                          VkBuffer             srcBuffer,
                          VkBuffer             dstBuffer,
                          uint32_t             regionCount,
                          const VkBufferCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions );
    }

    void vkCmdCopyImage( VkCommandBuffer     commandBuffer,
                         VkImage             srcImage,
                         VkImageLayout       srcImageLayout,
                         VkImage             dstImage,
                         VkImageLayout       dstImageLayout,
                         uint32_t            regionCount,
                         const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyImage(
        commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
    }

    void vkCmdBlitImage( VkCommandBuffer     commandBuffer,
                         VkImage             srcImage,
                         VkImageLayout       srcImageLayout,
                         VkImage             dstImage,
                         VkImageLayout       dstImageLayout,
                         uint32_t            regionCount,
                         const VkImageBlit * pRegions,
                         VkFilter            filter ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBlitImage(
        commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
    }

    void vkCmdCopyBufferToImage( VkCommandBuffer           commandBuffer,
                                 VkBuffer                  srcBuffer,
                                 VkImage                   dstImage,
                                 VkImageLayout             dstImageLayout,
                                 uint32_t                  regionCount,
                                 const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
    }

    void vkCmdCopyImageToBuffer( VkCommandBuffer           commandBuffer,
                                 VkImage                   srcImage,
                                 VkImageLayout             srcImageLayout,
                                 VkBuffer                  dstBuffer,
                                 uint32_t                  regionCount,
                                 const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
    }

    void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer,
                            VkBuffer        dstBuffer,
                            VkDeviceSize    dstOffset,
                            VkDeviceSize    dataSize,
                            const void *    pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData );
    }

    void vkCmdFillBuffer( VkCommandBuffer commandBuffer,
                          VkBuffer        dstBuffer,
                          VkDeviceSize    dstOffset,
                          VkDeviceSize    size,
                          uint32_t        data ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
    }

    void vkCmdClearColorImage( VkCommandBuffer                 commandBuffer,
                               VkImage                         image,
                               VkImageLayout                   imageLayout,
                               const VkClearColorValue *       pColor,
                               uint32_t                        rangeCount,
                               const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges );
    }

    void vkCmdClearDepthStencilImage( VkCommandBuffer                  commandBuffer,
                                      VkImage                          image,
                                      VkImageLayout                    imageLayout,
                                      const VkClearDepthStencilValue * pDepthStencil,
                                      uint32_t                         rangeCount,
                                      const VkImageSubresourceRange *  pRanges ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges );
    }

    void vkCmdClearAttachments( VkCommandBuffer           commandBuffer,
                                uint32_t                  attachmentCount,
                                const VkClearAttachment * pAttachments,
                                uint32_t                  rectCount,
                                const VkClearRect *       pRects ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects );
    }

    void vkCmdResolveImage( VkCommandBuffer        commandBuffer,
                            VkImage                srcImage,
                            VkImageLayout          srcImageLayout,
                            VkImage                dstImage,
                            VkImageLayout          dstImageLayout,
                            uint32_t               regionCount,
                            const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResolveImage(
        commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
    }

    void vkCmdSetEvent( VkCommandBuffer      commandBuffer,
                        VkEvent              event,
                        VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetEvent( commandBuffer, event, stageMask );
    }

    void vkCmdResetEvent( VkCommandBuffer      commandBuffer,
                          VkEvent              event,
                          VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResetEvent( commandBuffer, event, stageMask );
    }

    void vkCmdWaitEvents( VkCommandBuffer               commandBuffer,
                          uint32_t                      eventCount,
                          const VkEvent *               pEvents,
                          VkPipelineStageFlags          srcStageMask,
                          VkPipelineStageFlags          dstStageMask,
                          uint32_t                      memoryBarrierCount,
                          const VkMemoryBarrier *       pMemoryBarriers,
                          uint32_t                      bufferMemoryBarrierCount,
                          const VkBufferMemoryBarrier * pBufferMemoryBarriers,
                          uint32_t                      imageMemoryBarrierCount,
                          const VkImageMemoryBarrier *  pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWaitEvents( commandBuffer,
                                eventCount,
                                pEvents,
                                srcStageMask,
                                dstStageMask,
                                memoryBarrierCount,
                                pMemoryBarriers,
                                bufferMemoryBarrierCount,
                                pBufferMemoryBarriers,
                                imageMemoryBarrierCount,
                                pImageMemoryBarriers );
    }

    void vkCmdPipelineBarrier( VkCommandBuffer               commandBuffer,
                               VkPipelineStageFlags          srcStageMask,
                               VkPipelineStageFlags          dstStageMask,
                               VkDependencyFlags             dependencyFlags,
                               uint32_t                      memoryBarrierCount,
                               const VkMemoryBarrier *       pMemoryBarriers,
                               uint32_t                      bufferMemoryBarrierCount,
                               const VkBufferMemoryBarrier * pBufferMemoryBarriers,
                               uint32_t                      imageMemoryBarrierCount,
                               const VkImageMemoryBarrier *  pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdPipelineBarrier( commandBuffer,
                                     srcStageMask,
                                     dstStageMask,
                                     dependencyFlags,
                                     memoryBarrierCount,
                                     pMemoryBarriers,
                                     bufferMemoryBarrierCount,
                                     pBufferMemoryBarriers,
                                     imageMemoryBarrierCount,
                                     pImageMemoryBarriers );
    }
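
    // Note: a minimal, illustrative image layout transition recorded with vkCmdPipelineBarrier
    // (not generated code). Here `d` is an instance of this dispatch class, `cmd` a command
    // buffer in the recording state and `image` a single-mip, single-layer color VkImage.
    //
    //   VkImageMemoryBarrier barrier{};
    //   barrier.sType               = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    //   barrier.srcAccessMask       = 0;
    //   barrier.dstAccessMask       = VK_ACCESS_TRANSFER_WRITE_BIT;
    //   barrier.oldLayout           = VK_IMAGE_LAYOUT_UNDEFINED;
    //   barrier.newLayout           = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    //   barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    //   barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    //   barrier.image               = image;
    //   barrier.subresourceRange    = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };
    //   d.vkCmdPipelineBarrier( cmd,
    //                           VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
    //                           VK_PIPELINE_STAGE_TRANSFER_BIT,
    //                           0,
    //                           0, nullptr,
    //                           0, nullptr,
    //                           1, &barrier );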

    void vkCmdBeginQuery( VkCommandBuffer     commandBuffer,
                          VkQueryPool         queryPool,
                          uint32_t            query,
                          VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags );
    }

    void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndQuery( commandBuffer, queryPool, query );
    }

    void vkCmdResetQueryPool( VkCommandBuffer commandBuffer,
                              VkQueryPool     queryPool,
                              uint32_t        firstQuery,
                              uint32_t        queryCount ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
    }

    void vkCmdWriteTimestamp( VkCommandBuffer         commandBuffer,
                              VkPipelineStageFlagBits pipelineStage,
                              VkQueryPool             queryPool,
                              uint32_t                query ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
    }

    void vkCmdCopyQueryPoolResults( VkCommandBuffer    commandBuffer,
                                    VkQueryPool        queryPool,
                                    uint32_t           firstQuery,
                                    uint32_t           queryCount,
                                    VkBuffer           dstBuffer,
                                    VkDeviceSize       dstOffset,
                                    VkDeviceSize       stride,
                                    VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyQueryPoolResults(
        commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
    }

    void vkCmdPushConstants( VkCommandBuffer    commandBuffer,
                             VkPipelineLayout   layout,
                             VkShaderStageFlags stageFlags,
                             uint32_t           offset,
                             uint32_t           size,
                             const void *       pValues ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
    }

    void vkCmdBeginRenderPass( VkCommandBuffer               commandBuffer,
                               const VkRenderPassBeginInfo * pRenderPassBegin,
                               VkSubpassContents             contents ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents );
    }

    void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdNextSubpass( commandBuffer, contents );
    }

    void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndRenderPass( commandBuffer );
    }
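
    // Note: a minimal, illustrative render pass recording using vkCmdBeginRenderPass /
    // vkCmdEndRenderPass (not generated code). Here `d` is an instance of this dispatch
    // class; `cmd`, `renderPass`, `framebuffer` and `extent` (a VkExtent2D) are assumed
    // valid and compatible with each other.
    //
    //   VkClearValue clearColor{};
    //   clearColor.color        = { { 0.0f, 0.0f, 0.0f, 1.0f } };
    //   VkRenderPassBeginInfo rpBegin{};
    //   rpBegin.sType           = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
    //   rpBegin.renderPass      = renderPass;
    //   rpBegin.framebuffer     = framebuffer;
    //   rpBegin.renderArea      = { { 0, 0 }, extent };
    //   rpBegin.clearValueCount = 1;
    //   rpBegin.pClearValues    = &clearColor;
    //   d.vkCmdBeginRenderPass( cmd, &rpBegin, VK_SUBPASS_CONTENTS_INLINE );
    //   // bind pipeline / descriptor sets / vertex buffers, then vkCmdDraw...
    //   d.vkCmdEndRenderPass( cmd );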

    void vkCmdExecuteCommands( VkCommandBuffer         commandBuffer,
                               uint32_t                commandBufferCount,
                               const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
    }

    //=== VK_VERSION_1_1 ===

    VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkEnumerateInstanceVersion( pApiVersion );
    }

    VkResult vkBindBufferMemory2( VkDevice                       device,
                                  uint32_t                       bindInfoCount,
                                  const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
    }

    VkResult vkBindImageMemory2( VkDevice                      device,
                                 uint32_t                      bindInfoCount,
                                 const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
    }

    void vkGetDeviceGroupPeerMemoryFeatures( VkDevice                   device,
                                             uint32_t                   heapIndex,
                                             uint32_t                   localDeviceIndex,
                                             uint32_t                   remoteDeviceIndex,
                                             VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceGroupPeerMemoryFeatures(
        device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
    }

    void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
    }

    void vkCmdDispatchBase( VkCommandBuffer commandBuffer,
                            uint32_t        baseGroupX,
                            uint32_t        baseGroupY,
                            uint32_t        baseGroupZ,
                            uint32_t        groupCountX,
                            uint32_t        groupCountY,
                            uint32_t        groupCountZ ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDispatchBase(
        commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
    }

    VkResult vkEnumeratePhysicalDeviceGroups( VkInstance                        instance,
                                              uint32_t *                        pPhysicalDeviceGroupCount,
                                              VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
    }

    void vkGetImageMemoryRequirements2( VkDevice                               device,
                                        const VkImageMemoryRequirementsInfo2 * pInfo,
                                        VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
    }

    void vkGetBufferMemoryRequirements2( VkDevice                                device,
                                         const VkBufferMemoryRequirementsInfo2 * pInfo,
                                         VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
    }

    void vkGetImageSparseMemoryRequirements2( VkDevice                                     device,
                                              const VkImageSparseMemoryRequirementsInfo2 * pInfo,
                                              uint32_t *                         pSparseMemoryRequirementCount,
                                              VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageSparseMemoryRequirements2(
        device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
    }

    void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice            physicalDevice,
                                       VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
    }
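
    // Note: vkGetPhysicalDeviceFeatures2 fills a structure chain linked via pNext. A minimal,
    // illustrative query for the Vulkan 1.1 core feature struct (not generated code; `d` is an
    // instance of this dispatch class and `physicalDevice` a valid VkPhysicalDevice):
    //
    //   VkPhysicalDeviceVulkan11Features features11{};
    //   features11.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
    //   VkPhysicalDeviceFeatures2 features2{};
    //   features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    //   features2.pNext = &features11;
    //   d.vkGetPhysicalDeviceFeatures2( physicalDevice, &features2 );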

    void vkGetPhysicalDeviceProperties2( VkPhysicalDevice              physicalDevice,
                                         VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
    }

    void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice      physicalDevice,
                                               VkFormat              format,
                                               VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
    }

    VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice                         physicalDevice,
                                                        const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
                                                        VkImageFormatProperties2 * pImageFormatProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
    }

    void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice           physicalDevice,
                                                    uint32_t *                 pQueueFamilyPropertyCount,
                                                    VkQueueFamilyProperties2 * pQueueFamilyProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceQueueFamilyProperties2(
        physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
    }

    void vkGetPhysicalDeviceMemoryProperties2(
      VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
    }

    void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice                               physicalDevice,
                                                          const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
                                                          uint32_t *                                     pPropertyCount,
                                                          VkSparseImageFormatProperties2 * pProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSparseImageFormatProperties2(
        physicalDevice, pFormatInfo, pPropertyCount, pProperties );
    }

    void vkTrimCommandPool( VkDevice               device,
                            VkCommandPool          commandPool,
                            VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkTrimCommandPool( device, commandPool, flags );
    }

    void vkGetDeviceQueue2( VkDevice                   device,
                            const VkDeviceQueueInfo2 * pQueueInfo,
                            VkQueue *                  pQueue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
    }

    VkResult vkCreateSamplerYcbcrConversion( VkDevice                                   device,
                                             const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,
                                             const VkAllocationCallbacks *              pAllocator,
                                             VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
    }

    void vkDestroySamplerYcbcrConversion( VkDevice                      device,
                                          VkSamplerYcbcrConversion      ycbcrConversion,
                                          const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator );
    }

    VkResult vkCreateDescriptorUpdateTemplate( VkDevice                                     device,
                                               const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,
                                               const VkAllocationCallbacks *                pAllocator,
                                               VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
    }

    void vkDestroyDescriptorUpdateTemplate( VkDevice                      device,
                                            VkDescriptorUpdateTemplate    descriptorUpdateTemplate,
                                            const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator );
    }

    void vkUpdateDescriptorSetWithTemplate( VkDevice                   device,
                                            VkDescriptorSet            descriptorSet,
                                            VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                            const void *               pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
    }

    void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice                           physicalDevice,
                                                      const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
                                                      VkExternalBufferProperties * pExternalBufferProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalBufferProperties(
        physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
    }

    void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice                          physicalDevice,
                                                     const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
                                                     VkExternalFenceProperties * pExternalFenceProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalFenceProperties(
        physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
    }

    void vkGetPhysicalDeviceExternalSemaphoreProperties(
      VkPhysicalDevice                              physicalDevice,
      const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
      VkExternalSemaphoreProperties *               pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalSemaphoreProperties(
        physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
    }

    void vkGetDescriptorSetLayoutSupport( VkDevice                                device,
                                          const VkDescriptorSetLayoutCreateInfo * pCreateInfo,
                                          VkDescriptorSetLayoutSupport *          pSupport ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
    }

    //=== VK_VERSION_1_2 ===

    void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer,
                                 VkBuffer        buffer,
                                 VkDeviceSize    offset,
                                 VkBuffer        countBuffer,
                                 VkDeviceSize    countBufferOffset,
                                 uint32_t        maxDrawCount,
                                 uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndirectCount(
        commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
    }

    void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer,
                                        VkBuffer        buffer,
                                        VkDeviceSize    offset,
                                        VkBuffer        countBuffer,
                                        VkDeviceSize    countBufferOffset,
                                        uint32_t        maxDrawCount,
                                        uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndexedIndirectCount(
        commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
    }

    VkResult vkCreateRenderPass2( VkDevice                        device,
                                  const VkRenderPassCreateInfo2 * pCreateInfo,
                                  const VkAllocationCallbacks *   pAllocator,
                                  VkRenderPass *                  pRenderPass ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass );
    }

    void vkCmdBeginRenderPass2( VkCommandBuffer               commandBuffer,
                                const VkRenderPassBeginInfo * pRenderPassBegin,
                                const VkSubpassBeginInfo *    pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
    }

    void vkCmdNextSubpass2( VkCommandBuffer            commandBuffer,
                            const VkSubpassBeginInfo * pSubpassBeginInfo,
                            const VkSubpassEndInfo *   pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
    }

    void vkCmdEndRenderPass2( VkCommandBuffer          commandBuffer,
                              const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
    }

    void vkResetQueryPool( VkDevice    device,
                           VkQueryPool queryPool,
                           uint32_t    firstQuery,
                           uint32_t    queryCount ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
    }

    VkResult
      vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
    }

    VkResult vkWaitSemaphores( VkDevice                    device,
                               const VkSemaphoreWaitInfo * pWaitInfo,
                               uint64_t                    timeout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkWaitSemaphores( device, pWaitInfo, timeout );
    }

    VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSignalSemaphore( device, pSignalInfo );
    }
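
    // Note: vkGetSemaphoreCounterValue / vkWaitSemaphores / vkSignalSemaphore operate on
    // timeline semaphores (created with a VkSemaphoreTypeCreateInfo of
    // VK_SEMAPHORE_TYPE_TIMELINE chained into VkSemaphoreCreateInfo). A minimal, illustrative
    // host-side wait (not generated code; `d` is an instance of this dispatch class, and
    // `device` and `timelineSemaphore` are assumed valid):
    //
    //   uint64_t waitValue = 1;
    //   VkSemaphoreWaitInfo waitInfo{};
    //   waitInfo.sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO;
    //   waitInfo.semaphoreCount = 1;
    //   waitInfo.pSemaphores    = &timelineSemaphore;
    //   waitInfo.pValues        = &waitValue;
    //   d.vkWaitSemaphores( device, &waitInfo, UINT64_MAX );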

    VkDeviceAddress vkGetBufferDeviceAddress( VkDevice                          device,
                                              const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetBufferDeviceAddress( device, pInfo );
    }

    uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice                          device,
                                              const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetBufferOpaqueCaptureAddress( device, pInfo );
    }

    uint64_t vkGetDeviceMemoryOpaqueCaptureAddress(
      VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo );
    }

    //=== VK_KHR_surface ===

    void vkDestroySurfaceKHR( VkInstance                    instance,
                              VkSurfaceKHR                  surface,
                              const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
    }

    VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice,
                                                   uint32_t         queueFamilyIndex,
                                                   VkSurfaceKHR     surface,
                                                   VkBool32 *       pSupported ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
    }

    VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice           physicalDevice,
                                                        VkSurfaceKHR               surface,
                                                        VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
    }

    VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice     physicalDevice,
                                                   VkSurfaceKHR         surface,
                                                   uint32_t *           pSurfaceFormatCount,
                                                   VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
    }

    VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice   physicalDevice,
                                                        VkSurfaceKHR       surface,
                                                        uint32_t *         pPresentModeCount,
                                                        VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
    }
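
    // Illustrative usage sketch: the surface queries above follow the standard Vulkan
    // two-call enumeration pattern (query the count, then fill a caller-allocated array).
    // Assumes a valid `physicalDevice` and `surface`, and that <vector> is available:
    //
    //   uint32_t formatCount = 0;
    //   vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, &formatCount, nullptr );
    //   std::vector<VkSurfaceFormatKHR> formats( formatCount );
    //   vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, &formatCount, formats.data() );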

    //=== VK_KHR_swapchain ===

    VkResult vkCreateSwapchainKHR( VkDevice                         device,
                                   const VkSwapchainCreateInfoKHR * pCreateInfo,
                                   const VkAllocationCallbacks *    pAllocator,
                                   VkSwapchainKHR *                 pSwapchain ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
    }

    void vkDestroySwapchainKHR( VkDevice                      device,
                                VkSwapchainKHR                swapchain,
                                const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
    }

    VkResult vkGetSwapchainImagesKHR( VkDevice       device,
                                      VkSwapchainKHR swapchain,
                                      uint32_t *     pSwapchainImageCount,
                                      VkImage *      pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
    }

    VkResult vkAcquireNextImageKHR( VkDevice       device,
                                    VkSwapchainKHR swapchain,
                                    uint64_t       timeout,
                                    VkSemaphore    semaphore,
                                    VkFence        fence,
                                    uint32_t *     pImageIndex ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
    }

    VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueuePresentKHR( queue, pPresentInfo );
    }

    VkResult vkGetDeviceGroupPresentCapabilitiesKHR(
      VkDevice device, VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
    }

    VkResult vkGetDeviceGroupSurfacePresentModesKHR(
      VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
    }

    VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice,
                                                      VkSurfaceKHR     surface,
                                                      uint32_t *       pRectCount,
                                                      VkRect2D *       pRects ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
    }

    VkResult vkAcquireNextImage2KHR( VkDevice                          device,
                                     const VkAcquireNextImageInfoKHR * pAcquireInfo,
                                     uint32_t *                        pImageIndex ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
    }
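
    // Illustrative usage sketch for the swapchain entry points above (a minimal sketch,
    // assuming `device`, `swapchain`, `queue`, and binary semaphores `imageAvailableSemaphore`
    // and `renderFinishedSemaphore` were created elsewhere; error handling omitted):
    //
    //   uint32_t imageIndex = 0;
    //   vkAcquireNextImageKHR( device, swapchain, UINT64_MAX, imageAvailableSemaphore, VK_NULL_HANDLE, &imageIndex );
    //   // ... submit work that waits on imageAvailableSemaphore and signals renderFinishedSemaphore ...
    //   VkPresentInfoKHR presentInfo{};
    //   presentInfo.sType              = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
    //   presentInfo.waitSemaphoreCount = 1;
    //   presentInfo.pWaitSemaphores    = &renderFinishedSemaphore;
    //   presentInfo.swapchainCount     = 1;
    //   presentInfo.pSwapchains        = &swapchain;
    //   presentInfo.pImageIndices      = &imageIndex;
    //   vkQueuePresentKHR( queue, &presentInfo );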

    //=== VK_KHR_display ===

    VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice         physicalDevice,
                                                      uint32_t *               pPropertyCount,
                                                      VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
    }

    VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice              physicalDevice,
                                                           uint32_t *                    pPropertyCount,
                                                           VkDisplayPlanePropertiesKHR * pProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
    }

    VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice,
                                                    uint32_t         planeIndex,
                                                    uint32_t *       pDisplayCount,
                                                    VkDisplayKHR *   pDisplays ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
    }

    VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice             physicalDevice,
                                            VkDisplayKHR                 display,
                                            uint32_t *                   pPropertyCount,
                                            VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
    }

    VkResult vkCreateDisplayModeKHR( VkPhysicalDevice                   physicalDevice,
                                     VkDisplayKHR                       display,
                                     const VkDisplayModeCreateInfoKHR * pCreateInfo,
                                     const VkAllocationCallbacks *      pAllocator,
                                     VkDisplayModeKHR *                 pMode ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
    }

    VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice                physicalDevice,
                                               VkDisplayModeKHR                mode,
                                               uint32_t                        planeIndex,
                                               VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
    }

    VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance                            instance,
                                             const VkDisplaySurfaceCreateInfoKHR * pCreateInfo,
                                             const VkAllocationCallbacks *         pAllocator,
                                             VkSurfaceKHR *                        pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
    }

    //=== VK_KHR_display_swapchain ===

    VkResult vkCreateSharedSwapchainsKHR( VkDevice                         device,
                                          uint32_t                         swapchainCount,
                                          const VkSwapchainCreateInfoKHR * pCreateInfos,
                                          const VkAllocationCallbacks *    pAllocator,
                                          VkSwapchainKHR *                 pSwapchains ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
    }

#  if defined( VK_USE_PLATFORM_XLIB_KHR )
    //=== VK_KHR_xlib_surface ===

    VkResult vkCreateXlibSurfaceKHR( VkInstance                         instance,
                                     const VkXlibSurfaceCreateInfoKHR * pCreateInfo,
                                     const VkAllocationCallbacks *      pAllocator,
                                     VkSurfaceKHR *                     pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
    }

    VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice,
                                                            uint32_t         queueFamilyIndex,
                                                            Display *        dpy,
                                                            VisualID         visualID ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
    }
#  endif /*VK_USE_PLATFORM_XLIB_KHR*/

#  if defined( VK_USE_PLATFORM_XCB_KHR )
    //=== VK_KHR_xcb_surface ===

    VkResult vkCreateXcbSurfaceKHR( VkInstance                        instance,
                                    const VkXcbSurfaceCreateInfoKHR * pCreateInfo,
                                    const VkAllocationCallbacks *     pAllocator,
                                    VkSurfaceKHR *                    pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
    }

    VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice   physicalDevice,
                                                           uint32_t           queueFamilyIndex,
                                                           xcb_connection_t * connection,
                                                           xcb_visualid_t     visual_id ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
    }
#  endif /*VK_USE_PLATFORM_XCB_KHR*/

#  if defined( VK_USE_PLATFORM_WAYLAND_KHR )
    //=== VK_KHR_wayland_surface ===

    VkResult vkCreateWaylandSurfaceKHR( VkInstance                            instance,
                                        const VkWaylandSurfaceCreateInfoKHR * pCreateInfo,
                                        const VkAllocationCallbacks *         pAllocator,
                                        VkSurfaceKHR *                        pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
    }

    VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice    physicalDevice,
                                                               uint32_t            queueFamilyIndex,
                                                               struct wl_display * display ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
    }
#  endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#  if defined( VK_USE_PLATFORM_ANDROID_KHR )
    //=== VK_KHR_android_surface ===

    VkResult vkCreateAndroidSurfaceKHR( VkInstance                            instance,
                                        const VkAndroidSurfaceCreateInfoKHR * pCreateInfo,
                                        const VkAllocationCallbacks *         pAllocator,
                                        VkSurfaceKHR *                        pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
    }
#  endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#  if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_win32_surface ===

    VkResult vkCreateWin32SurfaceKHR( VkInstance                          instance,
                                      const VkWin32SurfaceCreateInfoKHR * pCreateInfo,
                                      const VkAllocationCallbacks *       pAllocator,
                                      VkSurfaceKHR *                      pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
    }

    VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice,
                                                             uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
    }
#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_EXT_debug_report ===

    VkResult vkCreateDebugReportCallbackEXT( VkInstance                                 instance,
                                             const VkDebugReportCallbackCreateInfoEXT * pCreateInfo,
                                             const VkAllocationCallbacks *              pAllocator,
                                             VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
    }

    void vkDestroyDebugReportCallbackEXT( VkInstance                    instance,
                                          VkDebugReportCallbackEXT      callback,
                                          const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
    }

    void vkDebugReportMessageEXT( VkInstance                 instance,
                                  VkDebugReportFlagsEXT      flags,
                                  VkDebugReportObjectTypeEXT objectType,
                                  uint64_t                   object,
                                  size_t                     location,
                                  int32_t                    messageCode,
                                  const char *               pLayerPrefix,
                                  const char *               pMessage ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDebugReportMessageEXT(
        instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
    }

    //=== VK_EXT_debug_marker ===

    VkResult vkDebugMarkerSetObjectTagEXT( VkDevice                              device,
                                           const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
    }

    VkResult vkDebugMarkerSetObjectNameEXT( VkDevice                               device,
                                            const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
    }

    void vkCmdDebugMarkerBeginEXT( VkCommandBuffer                    commandBuffer,
                                   const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo );
    }

    void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDebugMarkerEndEXT( commandBuffer );
    }

    void vkCmdDebugMarkerInsertEXT( VkCommandBuffer                    commandBuffer,
                                    const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo );
    }

#  if defined( VK_ENABLE_BETA_EXTENSIONS )
    //=== VK_KHR_video_queue ===

    VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice          physicalDevice,
                                                      const VkVideoProfileKHR * pVideoProfile,
                                                      VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities );
    }

    VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice                           physicalDevice,
                                                          const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
                                                          uint32_t *                   pVideoFormatPropertyCount,
                                                          VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR(
        physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties );
    }

    VkResult vkCreateVideoSessionKHR( VkDevice                            device,
                                      const VkVideoSessionCreateInfoKHR * pCreateInfo,
                                      const VkAllocationCallbacks *       pAllocator,
                                      VkVideoSessionKHR *                 pVideoSession ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession );
    }

    void vkDestroyVideoSessionKHR( VkDevice                      device,
                                   VkVideoSessionKHR             videoSession,
                                   const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator );
    }

    VkResult vkGetVideoSessionMemoryRequirementsKHR(
      VkDevice                        device,
      VkVideoSessionKHR               videoSession,
      uint32_t *                      pVideoSessionMemoryRequirementsCount,
      VkVideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetVideoSessionMemoryRequirementsKHR(
        device, videoSession, pVideoSessionMemoryRequirementsCount, pVideoSessionMemoryRequirements );
    }

    VkResult
      vkBindVideoSessionMemoryKHR( VkDevice                     device,
                                   VkVideoSessionKHR            videoSession,
                                   uint32_t                     videoSessionBindMemoryCount,
                                   const VkVideoBindMemoryKHR * pVideoSessionBindMemories ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindVideoSessionMemoryKHR(
        device, videoSession, videoSessionBindMemoryCount, pVideoSessionBindMemories );
    }

    VkResult vkCreateVideoSessionParametersKHR( VkDevice                                      device,
                                                const VkVideoSessionParametersCreateInfoKHR * pCreateInfo,
                                                const VkAllocationCallbacks *                 pAllocator,
                                                VkVideoSessionParametersKHR * pVideoSessionParameters ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters );
    }

    VkResult vkUpdateVideoSessionParametersKHR( VkDevice                                      device,
                                                VkVideoSessionParametersKHR                   videoSessionParameters,
                                                const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo );
    }

    void vkDestroyVideoSessionParametersKHR( VkDevice                      device,
                                             VkVideoSessionParametersKHR   videoSessionParameters,
                                             const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator );
    }

    void vkCmdBeginVideoCodingKHR( VkCommandBuffer                   commandBuffer,
                                   const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo );
    }

    void vkCmdEndVideoCodingKHR( VkCommandBuffer                 commandBuffer,
                                 const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo );
    }

    void vkCmdControlVideoCodingKHR( VkCommandBuffer                     commandBuffer,
                                     const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo );
    }
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/

#  if defined( VK_ENABLE_BETA_EXTENSIONS )
    //=== VK_KHR_video_decode_queue ===

    void vkCmdDecodeVideoKHR( VkCommandBuffer              commandBuffer,
                              const VkVideoDecodeInfoKHR * pFrameInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDecodeVideoKHR( commandBuffer, pFrameInfo );
    }
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/

    //=== VK_EXT_transform_feedback ===

    void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer      commandBuffer,
                                               uint32_t             firstBinding,
                                               uint32_t             bindingCount,
                                               const VkBuffer *     pBuffers,
                                               const VkDeviceSize * pOffsets,
                                               const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindTransformFeedbackBuffersEXT(
        commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
    }

    void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer      commandBuffer,
                                         uint32_t             firstCounterBuffer,
                                         uint32_t             counterBufferCount,
                                         const VkBuffer *     pCounterBuffers,
                                         const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginTransformFeedbackEXT(
        commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
    }

    void vkCmdEndTransformFeedbackEXT( VkCommandBuffer      commandBuffer,
                                       uint32_t             firstCounterBuffer,
                                       uint32_t             counterBufferCount,
                                       const VkBuffer *     pCounterBuffers,
                                       const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndTransformFeedbackEXT(
        commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
    }

    void vkCmdBeginQueryIndexedEXT( VkCommandBuffer     commandBuffer,
                                    VkQueryPool         queryPool,
                                    uint32_t            query,
                                    VkQueryControlFlags flags,
                                    uint32_t            index ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index );
    }

    void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer,
                                  VkQueryPool     queryPool,
                                  uint32_t        query,
                                  uint32_t        index ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index );
    }

    void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer,
                                        uint32_t        instanceCount,
                                        uint32_t        firstInstance,
                                        VkBuffer        counterBuffer,
                                        VkDeviceSize    counterBufferOffset,
                                        uint32_t        counterOffset,
                                        uint32_t        vertexStride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndirectByteCountEXT(
        commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
    }

    //=== VK_NVX_binary_import ===

    VkResult vkCreateCuModuleNVX( VkDevice                        device,
                                  const VkCuModuleCreateInfoNVX * pCreateInfo,
                                  const VkAllocationCallbacks *   pAllocator,
                                  VkCuModuleNVX *                 pModule ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule );
    }

    VkResult vkCreateCuFunctionNVX( VkDevice                          device,
                                    const VkCuFunctionCreateInfoNVX * pCreateInfo,
                                    const VkAllocationCallbacks *     pAllocator,
                                    VkCuFunctionNVX *                 pFunction ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction );
    }

    void vkDestroyCuModuleNVX( VkDevice                      device,
                               VkCuModuleNVX                 module,
                               const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyCuModuleNVX( device, module, pAllocator );
    }

    void vkDestroyCuFunctionNVX( VkDevice                      device,
                                 VkCuFunctionNVX               function,
                                 const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyCuFunctionNVX( device, function, pAllocator );
    }

    void vkCmdCuLaunchKernelNVX( VkCommandBuffer           commandBuffer,
                                 const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo );
    }

    //=== VK_NVX_image_view_handle ===

    uint32_t vkGetImageViewHandleNVX( VkDevice                         device,
                                      const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageViewHandleNVX( device, pInfo );
    }

    VkResult vkGetImageViewAddressNVX( VkDevice                          device,
                                       VkImageView                       imageView,
                                       VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageViewAddressNVX( device, imageView, pProperties );
    }

    //=== VK_AMD_draw_indirect_count ===

    void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer,
                                    VkBuffer        buffer,
                                    VkDeviceSize    offset,
                                    VkBuffer        countBuffer,
                                    VkDeviceSize    countBufferOffset,
                                    uint32_t        maxDrawCount,
                                    uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndirectCountAMD(
        commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
    }

    void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer,
                                           VkBuffer        buffer,
                                           VkDeviceSize    offset,
                                           VkBuffer        countBuffer,
                                           VkDeviceSize    countBufferOffset,
                                           uint32_t        maxDrawCount,
                                           uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndexedIndirectCountAMD(
        commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
    }

    //=== VK_AMD_shader_info ===

    VkResult vkGetShaderInfoAMD( VkDevice              device,
                                 VkPipeline            pipeline,
                                 VkShaderStageFlagBits shaderStage,
                                 VkShaderInfoTypeAMD   infoType,
                                 size_t *              pInfoSize,
                                 void *                pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
    }

#  if defined( VK_USE_PLATFORM_GGP )
    //=== VK_GGP_stream_descriptor_surface ===

    VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance                                     instance,
                                                 const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
                                                 const VkAllocationCallbacks *                  pAllocator,
                                                 VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
    }
#  endif /*VK_USE_PLATFORM_GGP*/

    //=== VK_NV_external_memory_capabilities ===

    VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
      VkPhysicalDevice                    physicalDevice,
      VkFormat                            format,
      VkImageType                         type,
      VkImageTiling                       tiling,
      VkImageUsageFlags                   usage,
      VkImageCreateFlags                  flags,
      VkExternalMemoryHandleTypeFlagsNV   externalHandleType,
      VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
        physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
    }

#  if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_NV_external_memory_win32 ===

    VkResult vkGetMemoryWin32HandleNV( VkDevice                          device,
                                       VkDeviceMemory                    memory,
                                       VkExternalMemoryHandleTypeFlagsNV handleType,
                                       HANDLE *                          pHandle ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle );
    }
#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_get_physical_device_properties2 ===

    void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice            physicalDevice,
                                          VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
    }

    void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice              physicalDevice,
                                            VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
    }

    void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice      physicalDevice,
                                                  VkFormat              format,
                                                  VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
    }

    VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice                         physicalDevice,
                                                           const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
                                                           VkImageFormatProperties2 * pImageFormatProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
    }

    void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice           physicalDevice,
                                                       uint32_t *                 pQueueFamilyPropertyCount,
                                                       VkQueueFamilyProperties2 * pQueueFamilyProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR(
        physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
    }

    void vkGetPhysicalDeviceMemoryProperties2KHR(
      VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
    }

    void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice,
                                                             const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
                                                             uint32_t *                       pPropertyCount,
                                                             VkSparseImageFormatProperties2 * pProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
        physicalDevice, pFormatInfo, pPropertyCount, pProperties );
    }
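
    // Illustrative usage sketch: the *2KHR queries above are extended via pNext chains.
    // A minimal sketch, assuming a valid `physicalDevice`, chaining a Vulkan 1.2 feature struct:
    //
    //   VkPhysicalDeviceTimelineSemaphoreFeatures timelineFeatures{};
    //   timelineFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES;
    //   VkPhysicalDeviceFeatures2 features2{};
    //   features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    //   features2.pNext = &timelineFeatures;
    //   vkGetPhysicalDeviceFeatures2KHR( physicalDevice, &features2 );
    //   // timelineFeatures.timelineSemaphore now reports whether timeline semaphores are supported.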

    //=== VK_KHR_device_group ===

    void
      vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice                   device,
                                             uint32_t                   heapIndex,
                                             uint32_t                   localDeviceIndex,
                                             uint32_t                   remoteDeviceIndex,
                                             VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceGroupPeerMemoryFeaturesKHR(
        device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
    }

    void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask );
    }

    void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer,
                               uint32_t        baseGroupX,
                               uint32_t        baseGroupY,
                               uint32_t        baseGroupZ,
                               uint32_t        groupCountX,
                               uint32_t        groupCountY,
                               uint32_t        groupCountZ ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDispatchBaseKHR(
        commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
    }

#  if defined( VK_USE_PLATFORM_VI_NN )
    //=== VK_NN_vi_surface ===

    VkResult vkCreateViSurfaceNN( VkInstance                      instance,
                                  const VkViSurfaceCreateInfoNN * pCreateInfo,
                                  const VkAllocationCallbacks *   pAllocator,
                                  VkSurfaceKHR *                  pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
    }
#  endif /*VK_USE_PLATFORM_VI_NN*/

    //=== VK_KHR_maintenance1 ===

    void vkTrimCommandPoolKHR( VkDevice               device,
                               VkCommandPool          commandPool,
                               VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkTrimCommandPoolKHR( device, commandPool, flags );
    }

    //=== VK_KHR_device_group_creation ===

    VkResult vkEnumeratePhysicalDeviceGroupsKHR(
      VkInstance                        instance,
      uint32_t *                        pPhysicalDeviceGroupCount,
      VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkEnumeratePhysicalDeviceGroupsKHR(
        instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
    }

    //=== VK_KHR_external_memory_capabilities ===

    void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice                           physicalDevice,
                                                         const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
                                                         VkExternalBufferProperties * pExternalBufferProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR(
        physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
    }

#  if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_external_memory_win32 ===

    VkResult vkGetMemoryWin32HandleKHR( VkDevice                              device,
                                        const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                        HANDLE *                              pHandle ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
    }

    VkResult vkGetMemoryWin32HandlePropertiesKHR(
      VkDevice                           device,
      VkExternalMemoryHandleTypeFlagBits handleType,
      HANDLE                             handle,
      VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties );
    }
#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_external_memory_fd ===

    VkResult
      vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd );
    }

    VkResult vkGetMemoryFdPropertiesKHR( VkDevice                           device,
                                         VkExternalMemoryHandleTypeFlagBits handleType,
                                         int                                fd,
                                         VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties );
    }

    //=== VK_KHR_external_semaphore_capabilities ===

    void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
      VkPhysicalDevice                              physicalDevice,
      const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
      VkExternalSemaphoreProperties *               pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
        physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
    }

#  if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_external_semaphore_win32 ===

    VkResult vkImportSemaphoreWin32HandleKHR(
      VkDevice                                    device,
      const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
    }

    VkResult vkGetSemaphoreWin32HandleKHR( VkDevice                                 device,
                                           const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                           HANDLE *                                 pHandle ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
    }
#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_external_semaphore_fd ===

    VkResult
      vkImportSemaphoreFdKHR( VkDevice                           device,
                              const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
    }

    VkResult vkGetSemaphoreFdKHR( VkDevice                        device,
                                  const VkSemaphoreGetFdInfoKHR * pGetFdInfo,
                                  int *                           pFd ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
    }

    //=== VK_KHR_push_descriptor ===

    void vkCmdPushDescriptorSetKHR( VkCommandBuffer              commandBuffer,
                                    VkPipelineBindPoint          pipelineBindPoint,
                                    VkPipelineLayout             layout,
                                    uint32_t                     set,
                                    uint32_t                     descriptorWriteCount,
                                    const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdPushDescriptorSetKHR(
        commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
    }

    void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer            commandBuffer,
                                                VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                VkPipelineLayout           layout,
                                                uint32_t                   set,
                                                const void *               pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
    }

    //=== VK_EXT_conditional_rendering ===

    void vkCmdBeginConditionalRenderingEXT(
      VkCommandBuffer                            commandBuffer,
      const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin );
    }

    void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndConditionalRenderingEXT( commandBuffer );
    }

    //=== VK_KHR_descriptor_update_template ===

    VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice                                     device,
                                                  const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,
                                                  const VkAllocationCallbacks *                pAllocator,
                                                  VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
    }

    void vkDestroyDescriptorUpdateTemplateKHR( VkDevice                      device,
                                               VkDescriptorUpdateTemplate    descriptorUpdateTemplate,
                                               const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator );
    }

    void vkUpdateDescriptorSetWithTemplateKHR( VkDevice                   device,
                                               VkDescriptorSet            descriptorSet,
                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                               const void *               pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
    }

    //=== VK_NV_clip_space_w_scaling ===

    void vkCmdSetViewportWScalingNV( VkCommandBuffer              commandBuffer,
                                     uint32_t                     firstViewport,
                                     uint32_t                     viewportCount,
                                     const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings );
    }

    //=== VK_EXT_direct_mode_display ===

    VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkReleaseDisplayEXT( physicalDevice, display );
    }

#  if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
    //=== VK_EXT_acquire_xlib_display ===

    VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice,
                                      Display *        dpy,
                                      VkDisplayKHR     display ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
    }

    VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice,
                                         Display *        dpy,
                                         RROutput         rrOutput,
                                         VkDisplayKHR *   pDisplay ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
    }
#  endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/

    //=== VK_EXT_display_surface_counter ===

    VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice            physicalDevice,
                                                         VkSurfaceKHR                surface,
                                                         VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
    }

    //=== VK_EXT_display_control ===

    VkResult vkDisplayPowerControlEXT( VkDevice                      device,
                                       VkDisplayKHR                  display,
                                       const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
    }

    VkResult vkRegisterDeviceEventEXT( VkDevice                      device,
                                       const VkDeviceEventInfoEXT *  pDeviceEventInfo,
                                       const VkAllocationCallbacks * pAllocator,
                                       VkFence *                     pFence ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
    }

    VkResult vkRegisterDisplayEventEXT( VkDevice                      device,
                                        VkDisplayKHR                  display,
                                        const VkDisplayEventInfoEXT * pDisplayEventInfo,
                                        const VkAllocationCallbacks * pAllocator,
                                        VkFence *                     pFence ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
    }

    VkResult vkGetSwapchainCounterEXT( VkDevice                    device,
                                       VkSwapchainKHR              swapchain,
                                       VkSurfaceCounterFlagBitsEXT counter,
                                       uint64_t *                  pCounterValue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
    }

    //=== VK_GOOGLE_display_timing ===

    VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice                       device,
                                              VkSwapchainKHR                 swapchain,
                                              VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
    }

    VkResult vkGetPastPresentationTimingGOOGLE( VkDevice                         device,
                                                VkSwapchainKHR                   swapchain,
                                                uint32_t *                       pPresentationTimingCount,
                                                VkPastPresentationTimingGOOGLE * pPresentationTimings ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings );
    }

    //=== VK_EXT_discard_rectangles ===

    void vkCmdSetDiscardRectangleEXT( VkCommandBuffer  commandBuffer,
                                      uint32_t         firstDiscardRectangle,
                                      uint32_t         discardRectangleCount,
                                      const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDiscardRectangleEXT(
        commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
    }

    //=== VK_EXT_hdr_metadata ===

    void vkSetHdrMetadataEXT( VkDevice                 device,
                              uint32_t                 swapchainCount,
                              const VkSwapchainKHR *   pSwapchains,
                              const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
    }

    //=== VK_KHR_create_renderpass2 ===

    VkResult vkCreateRenderPass2KHR( VkDevice                        device,
                                     const VkRenderPassCreateInfo2 * pCreateInfo,
                                     const VkAllocationCallbacks *   pAllocator,
                                     VkRenderPass *                  pRenderPass ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass );
    }

    void vkCmdBeginRenderPass2KHR( VkCommandBuffer               commandBuffer,
                                   const VkRenderPassBeginInfo * pRenderPassBegin,
                                   const VkSubpassBeginInfo *    pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
    }

    void vkCmdNextSubpass2KHR( VkCommandBuffer            commandBuffer,
                               const VkSubpassBeginInfo * pSubpassBeginInfo,
                               const VkSubpassEndInfo *   pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
    }

    void vkCmdEndRenderPass2KHR( VkCommandBuffer          commandBuffer,
                                 const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
    }

    //=== VK_KHR_shared_presentable_image ===

    VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSwapchainStatusKHR( device, swapchain );
    }

    //=== VK_KHR_external_fence_capabilities ===

    void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice                          physicalDevice,
                                                        const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
                                                        VkExternalFenceProperties * pExternalFenceProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceExternalFencePropertiesKHR(
        physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
    }

#  if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_external_fence_win32 ===

    VkResult vkImportFenceWin32HandleKHR(
      VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
    }

    VkResult vkGetFenceWin32HandleKHR( VkDevice                             device,
                                       const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                       HANDLE *                             pHandle ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
    }
#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_external_fence_fd ===

    VkResult vkImportFenceFdKHR( VkDevice                       device,
                                 const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
    }

    VkResult
      vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd );
    }

    //=== VK_KHR_performance_query ===

    VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
      VkPhysicalDevice                     physicalDevice,
      uint32_t                             queueFamilyIndex,
      uint32_t *                           pCounterCount,
      VkPerformanceCounterKHR *            pCounters,
      VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
        physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
    }

    void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
      VkPhysicalDevice                            physicalDevice,
      const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
      uint32_t *                                  pNumPasses ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
        physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
    }

    VkResult vkAcquireProfilingLockKHR( VkDevice                              device,
                                        const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAcquireProfilingLockKHR( device, pInfo );
    }

    void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkReleaseProfilingLockKHR( device );
    }
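
    // Illustrative usage sketch for the profiling lock pair above (a minimal sketch,
    // assuming `device` is valid and VK_KHR_performance_query is enabled on it):
    //
    //   VkAcquireProfilingLockInfoKHR lockInfo{};
    //   lockInfo.sType   = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR;
    //   lockInfo.timeout = UINT64_MAX;
    //   VkResult result = vkAcquireProfilingLockKHR( device, &lockInfo );
    //   // ... record and submit command buffers containing performance queries ...
    //   vkReleaseProfilingLockKHR( device );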

    //=== VK_KHR_get_surface_capabilities2 ===

    VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice                        physicalDevice,
                                                         const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                         VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
    }

    VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice                        physicalDevice,
                                                    const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                    uint32_t *                              pSurfaceFormatCount,
                                                    VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfaceFormats2KHR(
        physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
    }

    //=== VK_KHR_get_display_properties2 ===

    VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice          physicalDevice,
                                                       uint32_t *                pPropertyCount,
                                                       VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
    }

    VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice               physicalDevice,
                                                            uint32_t *                     pPropertyCount,
                                                            VkDisplayPlaneProperties2KHR * pProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
    }

    VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice              physicalDevice,
                                             VkDisplayKHR                  display,
                                             uint32_t *                    pPropertyCount,
                                             VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
    }

    VkResult
      vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice                 physicalDevice,
                                         const VkDisplayPlaneInfo2KHR *   pDisplayPlaneInfo,
                                         VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
    }

#  if defined( VK_USE_PLATFORM_IOS_MVK )
    //=== VK_MVK_ios_surface ===

    VkResult vkCreateIOSSurfaceMVK( VkInstance                        instance,
                                    const VkIOSSurfaceCreateInfoMVK * pCreateInfo,
                                    const VkAllocationCallbacks *     pAllocator,
                                    VkSurfaceKHR *                    pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
    }
#  endif /*VK_USE_PLATFORM_IOS_MVK*/

#  if defined( VK_USE_PLATFORM_MACOS_MVK )
    //=== VK_MVK_macos_surface ===

    VkResult vkCreateMacOSSurfaceMVK( VkInstance                          instance,
                                      const VkMacOSSurfaceCreateInfoMVK * pCreateInfo,
                                      const VkAllocationCallbacks *       pAllocator,
                                      VkSurfaceKHR *                      pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
    }
#  endif /*VK_USE_PLATFORM_MACOS_MVK*/

    //=== VK_EXT_debug_utils ===

    VkResult vkSetDebugUtilsObjectNameEXT( VkDevice                              device,
                                           const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
    }

    VkResult vkSetDebugUtilsObjectTagEXT( VkDevice                             device,
                                          const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
    }

    void vkQueueBeginDebugUtilsLabelEXT( VkQueue                      queue,
                                         const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo );
    }

    void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueueEndDebugUtilsLabelEXT( queue );
    }

    void vkQueueInsertDebugUtilsLabelEXT( VkQueue                      queue,
                                          const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo );
    }

    void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer              commandBuffer,
                                       const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
    }

    void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer );
    }

    void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer              commandBuffer,
                                        const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
    }

    VkResult vkCreateDebugUtilsMessengerEXT( VkInstance                                 instance,
                                             const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo,
                                             const VkAllocationCallbacks *              pAllocator,
                                             VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
    }

    void vkDestroyDebugUtilsMessengerEXT( VkInstance                    instance,
                                          VkDebugUtilsMessengerEXT      messenger,
                                          const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
    }

    void vkSubmitDebugUtilsMessageEXT( VkInstance                                   instance,
                                       VkDebugUtilsMessageSeverityFlagBitsEXT       messageSeverity,
                                       VkDebugUtilsMessageTypeFlagsEXT              messageTypes,
                                       const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
    }
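
    // Usage sketch: attaching a debug name to a handle through the wrappers above, assuming `d` is an instance of
    // this dispatch class, `device` / `queue` are valid handles, and the instance was created with
    // VK_EXT_debug_utils enabled.
    //
    //   VkDebugUtilsObjectNameInfoEXT nameInfo{};
    //   nameInfo.sType        = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
    //   nameInfo.objectType   = VK_OBJECT_TYPE_QUEUE;
    //   nameInfo.objectHandle = reinterpret_cast<uint64_t>( queue );  // dispatchable handles are pointers
    //   nameInfo.pObjectName  = "graphics queue";
    //   VkResult result = d.vkSetDebugUtilsObjectNameEXT( device, &nameInfo );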

#  if defined( VK_USE_PLATFORM_ANDROID_KHR )
    //=== VK_ANDROID_external_memory_android_hardware_buffer ===

    VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice                                   device,
                                                          const struct AHardwareBuffer *             buffer,
                                                          VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties );
    }

    VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice                                            device,
                                                      const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
                                                      struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer );
    }
#  endif /*VK_USE_PLATFORM_ANDROID_KHR*/

    //=== VK_EXT_sample_locations ===

    void vkCmdSetSampleLocationsEXT( VkCommandBuffer                  commandBuffer,
                                     const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
    }

    void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice             physicalDevice,
                                                      VkSampleCountFlagBits        samples,
                                                      VkMultisamplePropertiesEXT * pMultisampleProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
    }

    //=== VK_KHR_get_memory_requirements2 ===

    void vkGetImageMemoryRequirements2KHR( VkDevice                               device,
                                           const VkImageMemoryRequirementsInfo2 * pInfo,
                                           VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
    }

    void vkGetBufferMemoryRequirements2KHR( VkDevice                                device,
                                            const VkBufferMemoryRequirementsInfo2 * pInfo,
                                            VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
    }

    void vkGetImageSparseMemoryRequirements2KHR( VkDevice                                     device,
                                                 const VkImageSparseMemoryRequirementsInfo2 * pInfo,
                                                 uint32_t *                         pSparseMemoryRequirementCount,
                                                 VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageSparseMemoryRequirements2KHR(
        device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
    }
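
    // Usage sketch: querying buffer memory requirements via the extension entry point above, assuming `d` is an
    // instance of this dispatch class and `device` / `buffer` are valid handles.
    //
    //   VkBufferMemoryRequirementsInfo2 info{};
    //   info.sType  = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2;
    //   info.buffer = buffer;
    //   VkMemoryRequirements2 memoryRequirements{};
    //   memoryRequirements.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
    //   d.vkGetBufferMemoryRequirements2KHR( device, &info, &memoryRequirements );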

    //=== VK_KHR_acceleration_structure ===

    VkResult
      vkCreateAccelerationStructureKHR( VkDevice                                     device,
                                        const VkAccelerationStructureCreateInfoKHR * pCreateInfo,
                                        const VkAllocationCallbacks *                pAllocator,
                                        VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure );
    }

    void vkDestroyAccelerationStructureKHR( VkDevice                      device,
                                            VkAccelerationStructureKHR    accelerationStructure,
                                            const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator );
    }

    void vkCmdBuildAccelerationStructuresKHR(
      VkCommandBuffer                                          commandBuffer,
      uint32_t                                                 infoCount,
      const VkAccelerationStructureBuildGeometryInfoKHR *      pInfos,
      const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos );
    }

    void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer                                     commandBuffer,
                                                      uint32_t                                            infoCount,
                                                      const VkAccelerationStructureBuildGeometryInfoKHR * pInfos,
                                                      const VkDeviceAddress *  pIndirectDeviceAddresses,
                                                      const uint32_t *         pIndirectStrides,
                                                      const uint32_t * const * ppMaxPrimitiveCounts ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBuildAccelerationStructuresIndirectKHR(
        commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts );
    }

    VkResult vkBuildAccelerationStructuresKHR(
      VkDevice                                                 device,
      VkDeferredOperationKHR                                   deferredOperation,
      uint32_t                                                 infoCount,
      const VkAccelerationStructureBuildGeometryInfoKHR *      pInfos,
      const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos );
    }

    VkResult
      vkCopyAccelerationStructureKHR( VkDevice                                   device,
                                      VkDeferredOperationKHR                     deferredOperation,
                                      const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo );
    }

    VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice               device,
                                                     VkDeferredOperationKHR deferredOperation,
                                                     const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo );
    }

    VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice               device,
                                                     VkDeferredOperationKHR deferredOperation,
                                                     const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo );
    }

    VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice                           device,
                                                         uint32_t                           accelerationStructureCount,
                                                         const VkAccelerationStructureKHR * pAccelerationStructures,
                                                         VkQueryType                        queryType,
                                                         size_t                             dataSize,
                                                         void *                             pData,
                                                         size_t stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkWriteAccelerationStructuresPropertiesKHR(
        device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride );
    }

    void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer                            commandBuffer,
                                            const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo );
    }

    void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer                                    commandBuffer,
                                                    const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo );
    }

    void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer                                    commandBuffer,
                                                    const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo );
    }

    VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR(
      VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo );
    }

    void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer                    commandBuffer,
                                                        uint32_t                           accelerationStructureCount,
                                                        const VkAccelerationStructureKHR * pAccelerationStructures,
                                                        VkQueryType                        queryType,
                                                        VkQueryPool                        queryPool,
                                                        uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWriteAccelerationStructuresPropertiesKHR(
        commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
    }

    void vkGetDeviceAccelerationStructureCompatibilityKHR(
      VkDevice                                      device,
      const VkAccelerationStructureVersionInfoKHR * pVersionInfo,
      VkAccelerationStructureCompatibilityKHR *     pCompatibility ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility );
    }

    void vkGetAccelerationStructureBuildSizesKHR( VkDevice                                            device,
                                                  VkAccelerationStructureBuildTypeKHR                 buildType,
                                                  const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
                                                  const uint32_t *                           pMaxPrimitiveCounts,
                                                  VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo );
    }
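
    // Usage sketch: retrieving the device address of an acceleration structure through the wrappers above, assuming
    // `d` is an instance of this dispatch class and `device` / `accelerationStructure` are valid handles.
    //
    //   VkAccelerationStructureDeviceAddressInfoKHR addressInfo{};
    //   addressInfo.sType                 = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR;
    //   addressInfo.accelerationStructure = accelerationStructure;
    //   VkDeviceAddress address = d.vkGetAccelerationStructureDeviceAddressKHR( device, &addressInfo );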

    //=== VK_KHR_sampler_ycbcr_conversion ===

    VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice                                   device,
                                                const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,
                                                const VkAllocationCallbacks *              pAllocator,
                                                VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion );
    }

    void vkDestroySamplerYcbcrConversionKHR( VkDevice                      device,
                                             VkSamplerYcbcrConversion      ycbcrConversion,
                                             const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator );
    }

    //=== VK_KHR_bind_memory2 ===

    VkResult vkBindBufferMemory2KHR( VkDevice                       device,
                                     uint32_t                       bindInfoCount,
                                     const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
    }

    VkResult vkBindImageMemory2KHR( VkDevice                      device,
                                    uint32_t                      bindInfoCount,
                                    const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
    }

    //=== VK_EXT_image_drm_format_modifier ===

    VkResult vkGetImageDrmFormatModifierPropertiesEXT(
      VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties );
    }

    //=== VK_EXT_validation_cache ===

    VkResult vkCreateValidationCacheEXT( VkDevice                               device,
                                         const VkValidationCacheCreateInfoEXT * pCreateInfo,
                                         const VkAllocationCallbacks *          pAllocator,
                                         VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
    }

    void vkDestroyValidationCacheEXT( VkDevice                      device,
                                      VkValidationCacheEXT          validationCache,
                                      const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator );
    }

    VkResult vkMergeValidationCachesEXT( VkDevice                     device,
                                         VkValidationCacheEXT         dstCache,
                                         uint32_t                     srcCacheCount,
                                         const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
    }

    VkResult vkGetValidationCacheDataEXT( VkDevice             device,
                                          VkValidationCacheEXT validationCache,
                                          size_t *             pDataSize,
                                          void *               pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
    }

    //=== VK_NV_shading_rate_image ===

    void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer,
                                      VkImageView     imageView,
                                      VkImageLayout   imageLayout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout );
    }

    void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer                commandBuffer,
                                               uint32_t                       firstViewport,
                                               uint32_t                       viewportCount,
                                               const VkShadingRatePaletteNV * pShadingRatePalettes ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetViewportShadingRatePaletteNV(
        commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
    }

    void
      vkCmdSetCoarseSampleOrderNV( VkCommandBuffer                     commandBuffer,
                                   VkCoarseSampleOrderTypeNV           sampleOrderType,
                                   uint32_t                            customSampleOrderCount,
                                   const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetCoarseSampleOrderNV(
        commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
    }

    //=== VK_NV_ray_tracing ===

    VkResult
      vkCreateAccelerationStructureNV( VkDevice                                    device,
                                       const VkAccelerationStructureCreateInfoNV * pCreateInfo,
                                       const VkAllocationCallbacks *               pAllocator,
                                       VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
    }

    void vkDestroyAccelerationStructureNV( VkDevice                      device,
                                           VkAccelerationStructureNV     accelerationStructure,
                                           const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator );
    }

    void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice                                                device,
                                                         const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo,
                                                         VkMemoryRequirements2KHR * pMemoryRequirements ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
    }

    VkResult vkBindAccelerationStructureMemoryNV( VkDevice                                        device,
                                                  uint32_t                                        bindInfoCount,
                                                  const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
    }

    void vkCmdBuildAccelerationStructureNV( VkCommandBuffer                       commandBuffer,
                                            const VkAccelerationStructureInfoNV * pInfo,
                                            VkBuffer                              instanceData,
                                            VkDeviceSize                          instanceOffset,
                                            VkBool32                              update,
                                            VkAccelerationStructureNV             dst,
                                            VkAccelerationStructureNV             src,
                                            VkBuffer                              scratch,
                                            VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBuildAccelerationStructureNV(
        commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
    }

    void vkCmdCopyAccelerationStructureNV( VkCommandBuffer                    commandBuffer,
                                           VkAccelerationStructureNV          dst,
                                           VkAccelerationStructureNV          src,
                                           VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode );
    }

    void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer,
                           VkBuffer        raygenShaderBindingTableBuffer,
                           VkDeviceSize    raygenShaderBindingOffset,
                           VkBuffer        missShaderBindingTableBuffer,
                           VkDeviceSize    missShaderBindingOffset,
                           VkDeviceSize    missShaderBindingStride,
                           VkBuffer        hitShaderBindingTableBuffer,
                           VkDeviceSize    hitShaderBindingOffset,
                           VkDeviceSize    hitShaderBindingStride,
                           VkBuffer        callableShaderBindingTableBuffer,
                           VkDeviceSize    callableShaderBindingOffset,
                           VkDeviceSize    callableShaderBindingStride,
                           uint32_t        width,
                           uint32_t        height,
                           uint32_t        depth ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdTraceRaysNV( commandBuffer,
                                 raygenShaderBindingTableBuffer,
                                 raygenShaderBindingOffset,
                                 missShaderBindingTableBuffer,
                                 missShaderBindingOffset,
                                 missShaderBindingStride,
                                 hitShaderBindingTableBuffer,
                                 hitShaderBindingOffset,
                                 hitShaderBindingStride,
                                 callableShaderBindingTableBuffer,
                                 callableShaderBindingOffset,
                                 callableShaderBindingStride,
                                 width,
                                 height,
                                 depth );
    }

    VkResult vkCreateRayTracingPipelinesNV( VkDevice                                 device,
                                            VkPipelineCache                          pipelineCache,
                                            uint32_t                                 createInfoCount,
                                            const VkRayTracingPipelineCreateInfoNV * pCreateInfos,
                                            const VkAllocationCallbacks *            pAllocator,
                                            VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateRayTracingPipelinesNV(
        device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
    }

    VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice   device,
                                                  VkPipeline pipeline,
                                                  uint32_t   firstGroup,
                                                  uint32_t   groupCount,
                                                  size_t     dataSize,
                                                  void *     pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
    }

    VkResult vkGetAccelerationStructureHandleNV( VkDevice                  device,
                                                 VkAccelerationStructureNV accelerationStructure,
                                                 size_t                    dataSize,
                                                 void *                    pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData );
    }

    void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer                   commandBuffer,
                                                       uint32_t                          accelerationStructureCount,
                                                       const VkAccelerationStructureNV * pAccelerationStructures,
                                                       VkQueryType                       queryType,
                                                       VkQueryPool                       queryPool,
                                                       uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWriteAccelerationStructuresPropertiesNV(
        commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
    }

    VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCompileDeferredNV( device, pipeline, shader );
    }

    //=== VK_KHR_maintenance3 ===

    void vkGetDescriptorSetLayoutSupportKHR( VkDevice                                device,
                                             const VkDescriptorSetLayoutCreateInfo * pCreateInfo,
                                             VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
    }

    //=== VK_KHR_draw_indirect_count ===

    void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer,
                                    VkBuffer        buffer,
                                    VkDeviceSize    offset,
                                    VkBuffer        countBuffer,
                                    VkDeviceSize    countBufferOffset,
                                    uint32_t        maxDrawCount,
                                    uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndirectCountKHR(
        commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
    }

    void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer,
                                           VkBuffer        buffer,
                                           VkDeviceSize    offset,
                                           VkBuffer        countBuffer,
                                           VkDeviceSize    countBufferOffset,
                                           uint32_t        maxDrawCount,
                                           uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawIndexedIndirectCountKHR(
        commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
    }

    //=== VK_EXT_external_memory_host ===

    VkResult vkGetMemoryHostPointerPropertiesEXT(
      VkDevice                           device,
      VkExternalMemoryHandleTypeFlagBits handleType,
      const void *                       pHostPointer,
      VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties );
    }

    //=== VK_AMD_buffer_marker ===

    void vkCmdWriteBufferMarkerAMD( VkCommandBuffer         commandBuffer,
                                    VkPipelineStageFlagBits pipelineStage,
                                    VkBuffer                dstBuffer,
                                    VkDeviceSize            dstOffset,
                                    uint32_t                marker ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
    }

    //=== VK_EXT_calibrated_timestamps ===

    VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice  physicalDevice,
                                                             uint32_t *        pTimeDomainCount,
                                                             VkTimeDomainEXT * pTimeDomains ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
    }

    VkResult vkGetCalibratedTimestampsEXT( VkDevice                             device,
                                           uint32_t                             timestampCount,
                                           const VkCalibratedTimestampInfoEXT * pTimestampInfos,
                                           uint64_t *                           pTimestamps,
                                           uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
    }
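
    // Usage sketch: sampling a device and a host timestamp in one call through the wrappers above, assuming `d` is
    // an instance of this dispatch class, `device` is a valid handle, and both time domains were reported by
    // vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.
    //
    //   VkCalibratedTimestampInfoEXT timestampInfos[2]{};
    //   timestampInfos[0].sType      = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
    //   timestampInfos[0].timeDomain = VK_TIME_DOMAIN_DEVICE_EXT;
    //   timestampInfos[1].sType      = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT;
    //   timestampInfos[1].timeDomain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT;
    //   uint64_t timestamps[2];
    //   uint64_t maxDeviation;
    //   VkResult result = d.vkGetCalibratedTimestampsEXT( device, 2, timestampInfos, timestamps, &maxDeviation );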

    //=== VK_NV_mesh_shader ===

    void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer,
                               uint32_t        taskCount,
                               uint32_t        firstTask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask );
    }

    void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer,
                                       VkBuffer        buffer,
                                       VkDeviceSize    offset,
                                       uint32_t        drawCount,
                                       uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride );
    }

    void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer,
                                            VkBuffer        buffer,
                                            VkDeviceSize    offset,
                                            VkBuffer        countBuffer,
                                            VkDeviceSize    countBufferOffset,
                                            uint32_t        maxDrawCount,
                                            uint32_t        stride ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdDrawMeshTasksIndirectCountNV(
        commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
    }

    //=== VK_NV_scissor_exclusive ===

    void vkCmdSetExclusiveScissorNV( VkCommandBuffer  commandBuffer,
                                     uint32_t         firstExclusiveScissor,
                                     uint32_t         exclusiveScissorCount,
                                     const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetExclusiveScissorNV(
        commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
    }

    //=== VK_NV_device_diagnostic_checkpoints ===

    void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
    }

    void vkGetQueueCheckpointDataNV( VkQueue              queue,
                                     uint32_t *           pCheckpointDataCount,
                                     VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
    }

    //=== VK_KHR_timeline_semaphore ===

    VkResult vkGetSemaphoreCounterValueKHR( VkDevice    device,
                                            VkSemaphore semaphore,
                                            uint64_t *  pValue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
    }

    VkResult vkWaitSemaphoresKHR( VkDevice                    device,
                                  const VkSemaphoreWaitInfo * pWaitInfo,
                                  uint64_t                    timeout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
    }

    VkResult vkSignalSemaphoreKHR( VkDevice                      device,
                                   const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSignalSemaphoreKHR( device, pSignalInfo );
    }
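
    // Usage sketch: waiting for a timeline semaphore to reach a value through the wrappers above, assuming `d` is
    // an instance of this dispatch class, `device` is a valid handle, and `semaphore` was created with
    // VK_SEMAPHORE_TYPE_TIMELINE.
    //
    //   uint64_t waitValue = 1;
    //   VkSemaphoreWaitInfo waitInfo{};
    //   waitInfo.sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO;
    //   waitInfo.semaphoreCount = 1;
    //   waitInfo.pSemaphores    = &semaphore;
    //   waitInfo.pValues        = &waitValue;
    //   VkResult result = d.vkWaitSemaphoresKHR( device, &waitInfo, UINT64_MAX );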

    //=== VK_INTEL_performance_query ===

    VkResult vkInitializePerformanceApiINTEL(
      VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
    }

    void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkUninitializePerformanceApiINTEL( device );
    }

    VkResult
      vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer                      commandBuffer,
                                      const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
    }

    VkResult vkCmdSetPerformanceStreamMarkerINTEL(
      VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
    }

    VkResult
      vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer                        commandBuffer,
                                        const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
    }

    VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice                                           device,
                                                     const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
                                                     VkPerformanceConfigurationINTEL * pConfiguration ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
    }

    VkResult
      vkReleasePerformanceConfigurationINTEL( VkDevice                        device,
                                              VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
    }

    VkResult
      vkQueueSetPerformanceConfigurationINTEL( VkQueue                         queue,
                                               VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration );
    }

    VkResult vkGetPerformanceParameterINTEL( VkDevice                        device,
                                             VkPerformanceParameterTypeINTEL parameter,
                                             VkPerformanceValueINTEL *       pValue ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
    }

    //=== VK_AMD_display_native_hdr ===

    void vkSetLocalDimmingAMD( VkDevice       device,
                               VkSwapchainKHR swapChain,
                               VkBool32       localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
    }

#  if defined( VK_USE_PLATFORM_FUCHSIA )
    //=== VK_FUCHSIA_imagepipe_surface ===

    VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance                                  instance,
                                              const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
                                              const VkAllocationCallbacks *               pAllocator,
                                              VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
    }
#  endif /*VK_USE_PLATFORM_FUCHSIA*/

#  if defined( VK_USE_PLATFORM_METAL_EXT )
    //=== VK_EXT_metal_surface ===

    VkResult vkCreateMetalSurfaceEXT( VkInstance                          instance,
                                      const VkMetalSurfaceCreateInfoEXT * pCreateInfo,
                                      const VkAllocationCallbacks *       pAllocator,
                                      VkSurfaceKHR *                      pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
    }
#  endif /*VK_USE_PLATFORM_METAL_EXT*/

    //=== VK_KHR_fragment_shading_rate ===

    VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR(
      VkPhysicalDevice                         physicalDevice,
      uint32_t *                               pFragmentShadingRateCount,
      VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceFragmentShadingRatesKHR(
        physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates );
    }

    void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer                          commandBuffer,
                                         const VkExtent2D *                       pFragmentSize,
                                         const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps );
    }

    //=== VK_EXT_buffer_device_address ===

    VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice                          device,
                                                 const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetBufferDeviceAddressEXT( device, pInfo );
    }

    //=== VK_EXT_tooling_info ===

    VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice                    physicalDevice,
                                                   uint32_t *                          pToolCount,
                                                   VkPhysicalDeviceToolPropertiesEXT * pToolProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
    }

    //=== VK_NV_cooperative_matrix ===

    VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice                  physicalDevice,
                                                               uint32_t *                        pPropertyCount,
                                                               VkCooperativeMatrixPropertiesNV * pProperties ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
    }

    //=== VK_NV_coverage_reduction_mode ===

    VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
      VkPhysicalDevice                         physicalDevice,
      uint32_t *                               pCombinationCount,
      VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
        physicalDevice, pCombinationCount, pCombinations );
    }

#  if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_EXT_full_screen_exclusive ===

    VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice                        physicalDevice,
                                                         const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                         uint32_t *                              pPresentModeCount,
                                                         VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceSurfacePresentModes2EXT(
        physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
    }

    VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain );
    }

    VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
    }

    VkResult
      vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice                                device,
                                               const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                               VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes );
    }
#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_EXT_headless_surface ===

    VkResult vkCreateHeadlessSurfaceEXT( VkInstance                             instance,
                                         const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo,
                                         const VkAllocationCallbacks *          pAllocator,
                                         VkSurfaceKHR *                         pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
    }

    //=== VK_KHR_buffer_device_address ===

    VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice                          device,
                                                 const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetBufferDeviceAddressKHR( device, pInfo );
    }

    uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice                          device,
                                                 const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo );
    }

    uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR(
      VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo );
    }

    //=== VK_EXT_line_rasterization ===

    void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer,
                                 uint32_t        lineStippleFactor,
                                 uint16_t        lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
    }

    //=== VK_EXT_host_query_reset ===

    void vkResetQueryPoolEXT( VkDevice    device,
                              VkQueryPool queryPool,
                              uint32_t    firstQuery,
                              uint32_t    queryCount ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
    }

    //=== VK_EXT_extended_dynamic_state ===

    void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetCullModeEXT( commandBuffer, cullMode );
    }

    void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace );
    }

    void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer     commandBuffer,
                                       VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology );
    }

    void vkCmdSetViewportWithCountEXT( VkCommandBuffer    commandBuffer,
                                       uint32_t           viewportCount,
                                       const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports );
    }

    void vkCmdSetScissorWithCountEXT( VkCommandBuffer  commandBuffer,
                                      uint32_t         scissorCount,
                                      const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors );
    }

    void vkCmdBindVertexBuffers2EXT( VkCommandBuffer      commandBuffer,
                                     uint32_t             firstBinding,
                                     uint32_t             bindingCount,
                                     const VkBuffer *     pBuffers,
                                     const VkDeviceSize * pOffsets,
                                     const VkDeviceSize * pSizes,
                                     const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindVertexBuffers2EXT(
        commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
    }

    void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable );
    }

    void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer,
                                      VkBool32        depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable );
    }

    void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer,
                                    VkCompareOp     depthCompareOp ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp );
    }

    void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer,
                                           VkBool32        depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable );
    }

    void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer,
                                       VkBool32        stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable );
    }

    void vkCmdSetStencilOpEXT( VkCommandBuffer    commandBuffer,
                               VkStencilFaceFlags faceMask,
                               VkStencilOp        failOp,
                               VkStencilOp        passOp,
                               VkStencilOp        depthFailOp,
                               VkCompareOp        compareOp ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
    }
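
    // Usage sketch: setting a few pieces of extended dynamic state through the wrappers above, assuming `d` is an
    // instance of this dispatch class, `commandBuffer` is in the recording state, and the extendedDynamicState
    // feature is enabled.
    //
    //   d.vkCmdSetCullModeEXT( commandBuffer, VK_CULL_MODE_BACK_BIT );
    //   d.vkCmdSetFrontFaceEXT( commandBuffer, VK_FRONT_FACE_COUNTER_CLOCKWISE );
    //   d.vkCmdSetDepthTestEnableEXT( commandBuffer, VK_TRUE );
    //   d.vkCmdSetDepthCompareOpEXT( commandBuffer, VK_COMPARE_OP_LESS_OR_EQUAL );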

    //=== VK_KHR_deferred_host_operations ===

    VkResult vkCreateDeferredOperationKHR( VkDevice                      device,
                                           const VkAllocationCallbacks * pAllocator,
                                           VkDeferredOperationKHR *      pDeferredOperation ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation );
    }

    void vkDestroyDeferredOperationKHR( VkDevice                      device,
                                        VkDeferredOperationKHR        operation,
                                        const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator );
    }

    uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice               device,
                                                      VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation );
    }

    VkResult vkGetDeferredOperationResultKHR( VkDevice               device,
                                              VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetDeferredOperationResultKHR( device, operation );
    }

    VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDeferredOperationJoinKHR( device, operation );
    }
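
    // Usage sketch: driving a deferred operation to completion on the calling thread through the wrappers above,
    // assuming `d` is an instance of this dispatch class and `operation` was returned as VK_OPERATION_DEFERRED_KHR
    // by a deferrable command on `device`.
    //
    //   VkResult joinResult;
    //   do
    //   {
    //     joinResult = d.vkDeferredOperationJoinKHR( device, operation );
    //   } while ( joinResult == VK_THREAD_IDLE_KHR );
    //   if ( joinResult == VK_SUCCESS )
    //   {
    //     VkResult operationResult = d.vkGetDeferredOperationResultKHR( device, operation );
    //   }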

    //=== VK_KHR_pipeline_executable_properties ===

    VkResult
      vkGetPipelineExecutablePropertiesKHR( VkDevice                            device,
                                            const VkPipelineInfoKHR *           pPipelineInfo,
                                            uint32_t *                          pExecutableCount,
                                            VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
    }

    VkResult
      vkGetPipelineExecutableStatisticsKHR( VkDevice                            device,
                                            const VkPipelineExecutableInfoKHR * pExecutableInfo,
                                            uint32_t *                          pStatisticCount,
                                            VkPipelineExecutableStatisticKHR *  pStatistics ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
    }

    VkResult vkGetPipelineExecutableInternalRepresentationsKHR(
      VkDevice                                        device,
      const VkPipelineExecutableInfoKHR *             pExecutableInfo,
      uint32_t *                                      pInternalRepresentationCount,
      VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPipelineExecutableInternalRepresentationsKHR(
        device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
    }

    //=== VK_NV_device_generated_commands ===

    void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice                                            device,
                                                     const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo,
                                                     VkMemoryRequirements2 * pMemoryRequirements ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
    }

    void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer                   commandBuffer,
                                             const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
    }

    void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer                   commandBuffer,
                                          VkBool32                          isPreprocessed,
                                          const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo );
    }

    void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer     commandBuffer,
                                         VkPipelineBindPoint pipelineBindPoint,
                                         VkPipeline          pipeline,
                                         uint32_t            groupIndex ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex );
    }

    VkResult
      vkCreateIndirectCommandsLayoutNV( VkDevice                                     device,
                                        const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo,
                                        const VkAllocationCallbacks *                pAllocator,
                                        VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
    }

    void vkDestroyIndirectCommandsLayoutNV( VkDevice                      device,
                                            VkIndirectCommandsLayoutNV    indirectCommandsLayout,
                                            const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator );
    }

    //=== VK_EXT_private_data ===

    VkResult vkCreatePrivateDataSlotEXT( VkDevice                               device,
                                         const VkPrivateDataSlotCreateInfoEXT * pCreateInfo,
                                         const VkAllocationCallbacks *          pAllocator,
                                         VkPrivateDataSlotEXT * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot );
    }

    void vkDestroyPrivateDataSlotEXT( VkDevice                      device,
                                      VkPrivateDataSlotEXT          privateDataSlot,
                                      const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator );
    }

    VkResult vkSetPrivateDataEXT( VkDevice             device,
                                  VkObjectType         objectType,
                                  uint64_t             objectHandle,
                                  VkPrivateDataSlotEXT privateDataSlot,
                                  uint64_t             data ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data );
    }

    void vkGetPrivateDataEXT( VkDevice             device,
                              VkObjectType         objectType,
                              uint64_t             objectHandle,
                              VkPrivateDataSlotEXT privateDataSlot,
                              uint64_t *           pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData );
    }

#  if defined( VK_ENABLE_BETA_EXTENSIONS )
    //=== VK_KHR_video_encode_queue ===

    void vkCmdEncodeVideoKHR( VkCommandBuffer              commandBuffer,
                              const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo );
    }
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/

    //=== VK_KHR_synchronization2 ===

    void vkCmdSetEvent2KHR( VkCommandBuffer             commandBuffer,
                            VkEvent                     event,
                            const VkDependencyInfoKHR * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo );
    }

    void vkCmdResetEvent2KHR( VkCommandBuffer          commandBuffer,
                              VkEvent                  event,
                              VkPipelineStageFlags2KHR stageMask ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask );
    }

    void vkCmdWaitEvents2KHR( VkCommandBuffer             commandBuffer,
                              uint32_t                    eventCount,
                              const VkEvent *             pEvents,
                              const VkDependencyInfoKHR * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos );
    }

    void vkCmdPipelineBarrier2KHR( VkCommandBuffer             commandBuffer,
                                   const VkDependencyInfoKHR * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo );
    }

    void vkCmdWriteTimestamp2KHR( VkCommandBuffer          commandBuffer,
                                  VkPipelineStageFlags2KHR stage,
                                  VkQueryPool              queryPool,
                                  uint32_t                 query ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query );
    }

    VkResult vkQueueSubmit2KHR( VkQueue                  queue,
                                uint32_t                 submitCount,
                                const VkSubmitInfo2KHR * pSubmits,
                                VkFence                  fence ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence );
    }

    void vkCmdWriteBufferMarker2AMD( VkCommandBuffer          commandBuffer,
                                     VkPipelineStageFlags2KHR stage,
                                     VkBuffer                 dstBuffer,
                                     VkDeviceSize             dstOffset,
                                     uint32_t                 marker ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker );
    }

    void vkGetQueueCheckpointData2NV( VkQueue               queue,
                                      uint32_t *            pCheckpointDataCount,
                                      VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData );
    }

    //=== VK_NV_fragment_shading_rate_enums ===

    void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer                          commandBuffer,
                                            VkFragmentShadingRateNV                  shadingRate,
                                            const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps );
    }

    //=== VK_KHR_copy_commands2 ===

    void vkCmdCopyBuffer2KHR( VkCommandBuffer              commandBuffer,
                              const VkCopyBufferInfo2KHR * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo );
    }

    void vkCmdCopyImage2KHR( VkCommandBuffer             commandBuffer,
                             const VkCopyImageInfo2KHR * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo );
    }

    void
      vkCmdCopyBufferToImage2KHR( VkCommandBuffer                     commandBuffer,
                                  const VkCopyBufferToImageInfo2KHR * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo );
    }

    void
      vkCmdCopyImageToBuffer2KHR( VkCommandBuffer                     commandBuffer,
                                  const VkCopyImageToBufferInfo2KHR * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo );
    }

    void vkCmdBlitImage2KHR( VkCommandBuffer             commandBuffer,
                             const VkBlitImageInfo2KHR * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo );
    }

    void vkCmdResolveImage2KHR( VkCommandBuffer                commandBuffer,
                                const VkResolveImageInfo2KHR * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo );
    }

#  if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_NV_acquire_winrt_display ===

    VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkAcquireWinrtDisplayNV( physicalDevice, display );
    }

    VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice,
                                  uint32_t         deviceRelativeId,
                                  VkDisplayKHR *   pDisplay ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay );
    }
#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

#  if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
    //=== VK_EXT_directfb_surface ===

    VkResult vkCreateDirectFBSurfaceEXT( VkInstance                             instance,
                                         const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo,
                                         const VkAllocationCallbacks *          pAllocator,
                                         VkSurfaceKHR *                         pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
    }

    VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice,
                                                                uint32_t         queueFamilyIndex,
                                                                IDirectFB *      dfb ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb );
    }
#  endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/

    //=== VK_KHR_ray_tracing_pipeline ===

    void vkCmdTraceRaysKHR( VkCommandBuffer                         commandBuffer,
                            const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
                            const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable,
                            const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable,
                            const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
                            uint32_t                                width,
                            uint32_t                                height,
                            uint32_t                                depth ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdTraceRaysKHR( commandBuffer,
                                  pRaygenShaderBindingTable,
                                  pMissShaderBindingTable,
                                  pHitShaderBindingTable,
                                  pCallableShaderBindingTable,
                                  width,
                                  height,
                                  depth );
    }

    VkResult vkCreateRayTracingPipelinesKHR( VkDevice                                  device,
                                             VkDeferredOperationKHR                    deferredOperation,
                                             VkPipelineCache                           pipelineCache,
                                             uint32_t                                  createInfoCount,
                                             const VkRayTracingPipelineCreateInfoKHR * pCreateInfos,
                                             const VkAllocationCallbacks *             pAllocator,
                                             VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateRayTracingPipelinesKHR(
        device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
    }

    VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice   device,
                                                   VkPipeline pipeline,
                                                   uint32_t   firstGroup,
                                                   uint32_t   groupCount,
                                                   size_t     dataSize,
                                                   void *     pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
    }

    VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice   device,
                                                                VkPipeline pipeline,
                                                                uint32_t   firstGroup,
                                                                uint32_t   groupCount,
                                                                size_t     dataSize,
                                                                void *     pData ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
        device, pipeline, firstGroup, groupCount, dataSize, pData );
    }

    void vkCmdTraceRaysIndirectKHR( VkCommandBuffer                         commandBuffer,
                                    const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
                                    const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable,
                                    const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable,
                                    const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
                                    VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdTraceRaysIndirectKHR( commandBuffer,
                                          pRaygenShaderBindingTable,
                                          pMissShaderBindingTable,
                                          pHitShaderBindingTable,
                                          pCallableShaderBindingTable,
                                          indirectDeviceAddress );
    }

    VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice               device,
                                                         VkPipeline             pipeline,
                                                         uint32_t               group,
                                                         VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader );
    }

    void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer,
                                                 uint32_t        pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize );
    }

    //=== VK_EXT_vertex_input_dynamic_state ===

    void vkCmdSetVertexInputEXT( VkCommandBuffer                               commandBuffer,
                                 uint32_t                                      vertexBindingDescriptionCount,
                                 const VkVertexInputBindingDescription2EXT *   pVertexBindingDescriptions,
                                 uint32_t                                      vertexAttributeDescriptionCount,
                                 const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const
      VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetVertexInputEXT( commandBuffer,
                                       vertexBindingDescriptionCount,
                                       pVertexBindingDescriptions,
                                       vertexAttributeDescriptionCount,
                                       pVertexAttributeDescriptions );
    }

#  if defined( VK_USE_PLATFORM_FUCHSIA )
    //=== VK_FUCHSIA_external_memory ===

    VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice                                   device,
                                             const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
                                             zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle );
    }

    VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA(
      VkDevice                                device,
      VkExternalMemoryHandleTypeFlagBits      handleType,
      zx_handle_t                             zirconHandle,
      VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetMemoryZirconHandlePropertiesFUCHSIA(
        device, handleType, zirconHandle, pMemoryZirconHandleProperties );
    }
#  endif /*VK_USE_PLATFORM_FUCHSIA*/

#  if defined( VK_USE_PLATFORM_FUCHSIA )
    //=== VK_FUCHSIA_external_semaphore ===

    VkResult vkImportSemaphoreZirconHandleFUCHSIA(
      VkDevice                                         device,
      const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo );
    }

    VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice                                      device,
                                                const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
                                                zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle );
    }
#  endif /*VK_USE_PLATFORM_FUCHSIA*/

    //=== VK_EXT_extended_dynamic_state2 ===

    void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer,
                                        uint32_t        patchControlPoints ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints );
    }

    void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer,
                                             VkBool32        rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable );
    }

    void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable );
    }

    void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp );
    }

    void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer,
                                            VkBool32        primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable );
    }

#  if defined( VK_USE_PLATFORM_SCREEN_QNX )
    //=== VK_QNX_screen_surface ===

    VkResult vkCreateScreenSurfaceQNX( VkInstance                           instance,
                                       const VkScreenSurfaceCreateInfoQNX * pCreateInfo,
                                       const VkAllocationCallbacks *        pAllocator,
                                       VkSurfaceKHR *                       pSurface ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface );
    }

    VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice        physicalDevice,
                                                              uint32_t                queueFamilyIndex,
                                                              struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window );
    }
#  endif /*VK_USE_PLATFORM_SCREEN_QNX*/

    //=== VK_EXT_color_write_enable ===

    void vkCmdSetColorWriteEnableEXT( VkCommandBuffer  commandBuffer,
                                      uint32_t         attachmentCount,
                                      const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT
    {
      return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables );
    }
  };
#endif

  class DispatchLoaderDynamic;
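  // DispatchLoaderDynamic, defined near the end of this header, resolves every command at runtime
  // through vkGetInstanceProcAddr / vkGetDeviceProcAddr instead of relying on the prototypes of
  // vulkan.h, which is why it becomes the default dispatcher when VK_NO_PROTOTYPES is defined.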
#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC )
#  if defined( VK_NO_PROTOTYPES )
#    define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
#  else
#    define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
#  endif
#endif

#if !defined( VULKAN_HPP_STORAGE_API )
#  if defined( VULKAN_HPP_STORAGE_SHARED )
#    if defined( _MSC_VER )
#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
#        define VULKAN_HPP_STORAGE_API __declspec( dllexport )
#      else
#        define VULKAN_HPP_STORAGE_API __declspec( dllimport )
#      endif
#    elif defined( __clang__ ) || defined( __GNUC__ )
#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
#        define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) )
#      else
#        define VULKAN_HPP_STORAGE_API
#      endif
#    else
#      define VULKAN_HPP_STORAGE_API
#      pragma warning Unknown import / export semantics
#    endif
#  else
#    define VULKAN_HPP_STORAGE_API
#  endif
#endif

#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER )
#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
#    define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
#    define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE                     \
      namespace VULKAN_HPP_NAMESPACE                                               \
      {                                                                            \
        VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \
      }
  extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic;
#  else
#    define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic()
#    define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
#  endif
#endif
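
  // Note on the dynamic default dispatcher: with VULKAN_HPP_DISPATCH_LOADER_DYNAMIC set to 1,
  // exactly one translation unit has to provide the storage for defaultDispatchLoaderDynamic by
  // placing VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE at global scope, and the dispatcher
  // has to be initialized before it is used. A minimal sketch (the exact init() overloads are those
  // of DispatchLoaderDynamic further down in this header; 'instance' and 'device' stand for handles
  // created by the application):
  //
  //   VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE   // in exactly one source file
  //
  //   VULKAN_HPP_DEFAULT_DISPATCHER.init();                // load vkGetInstanceProcAddr
  //   VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );      // resolve instance-level commands
  //   VULKAN_HPP_DEFAULT_DISPATCHER.init( device );        // resolve device-level commands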

#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE )
#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic
#  else
#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic
#  endif
#endif

#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER )
#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT
#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT
#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT
#else
#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {}
#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr
#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER
#endif
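
  // When VULKAN_HPP_NO_DEFAULT_DISPATCHER is defined, the three macros above expand to nothing, so
  // allocator and dispatcher parameters lose their default arguments and have to be passed
  // explicitly at every call site.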

  struct AllocationCallbacks;
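
  // ObjectDestroy below is the deleter policy used by the unique handle types of this header: it
  // hands the wrapped handle back to its owner via owner.destroy( handle, allocationCallbacks,
  // dispatch ).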

  template <typename OwnerType, typename Dispatch>
  class ObjectDestroy
  {
  public:
    ObjectDestroy() = default;

    ObjectDestroy( OwnerType                           owner,
                   Optional<const AllocationCallbacks> allocationCallbacks
                                                       VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                   Dispatch const &                    dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
      : m_owner( owner )
      , m_allocationCallbacks( allocationCallbacks )
      , m_dispatch( &dispatch )
    {}

    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
    {
      return m_owner;
    }
    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
    {
      return m_allocationCallbacks;
    }

  protected:
    template <typename T>
    void destroy( T t ) VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
      m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
    }

  private:
    OwnerType                           m_owner               = {};
    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
    Dispatch const *                    m_dispatch            = nullptr;
  };

  class NoParent;
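
  // The ObjectDestroy specialization below covers objects without an owner (Instance, for example):
  // the handle destroys itself through its own destroy() member.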

  template <typename Dispatch>
  class ObjectDestroy<NoParent, Dispatch>
  {
  public:
    ObjectDestroy() = default;

    ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks,
                   Dispatch const &                    dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
      : m_allocationCallbacks( allocationCallbacks )
      , m_dispatch( &dispatch )
    {}

    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
    {
      return m_allocationCallbacks;
    }

  protected:
    template <typename T>
    void destroy( T t ) VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( m_dispatch );
      t.destroy( m_allocationCallbacks, *m_dispatch );
    }

  private:
    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
    Dispatch const *                    m_dispatch            = nullptr;
  };
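
  // ObjectFree below mirrors ObjectDestroy for handles that are freed rather than destroyed
  // (DeviceMemory, for example): it calls owner.free( handle, allocationCallbacks, dispatch ).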

  template <typename OwnerType, typename Dispatch>
  class ObjectFree
  {
  public:
    ObjectFree() = default;

    ObjectFree( OwnerType                           owner,
                Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
      : m_owner( owner )
      , m_allocationCallbacks( allocationCallbacks )
      , m_dispatch( &dispatch )
    {}

    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
    {
      return m_owner;
    }

    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
    {
      return m_allocationCallbacks;
    }

  protected:
    template <typename T>
    void destroy( T t ) VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
      m_owner.free( t, m_allocationCallbacks, *m_dispatch );
    }

  private:
    OwnerType                           m_owner               = {};
    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
    Dispatch const *                    m_dispatch            = nullptr;
  };
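
  // ObjectRelease below is used for handles that are released back to their owner rather than
  // destroyed or freed: it simply calls owner.release( handle, dispatch ).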

  template <typename OwnerType, typename Dispatch>
  class ObjectRelease
  {
  public:
    ObjectRelease() = default;

    ObjectRelease( OwnerType owner, Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
      : m_owner( owner )
      , m_dispatch( &dispatch )
    {}

    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
    {
      return m_owner;
    }

  protected:
    template <typename T>
    void destroy( T t ) VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
      m_owner.release( t, *m_dispatch );
    }

  private:
    OwnerType        m_owner    = {};
    Dispatch const * m_dispatch = nullptr;
  };
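
  // PoolFree below is the deleter for pool-allocated handles such as CommandBuffer and
  // DescriptorSet: it returns the handle to the pool it was allocated from via
  // owner.free( pool, handle, dispatch ).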

  template <typename OwnerType, typename PoolType, typename Dispatch>
  class PoolFree
  {
  public:
    PoolFree() = default;

    PoolFree( OwnerType        owner,
              PoolType         pool,
              Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
      : m_owner( owner )
      , m_pool( pool )
      , m_dispatch( &dispatch )
    {}

    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
    {
      return m_owner;
    }
    PoolType getPool() const VULKAN_HPP_NOEXCEPT
    {
      return m_pool;
    }

  protected:
    template <typename T>
    void destroy( T t ) VULKAN_HPP_NOEXCEPT
    {
      m_owner.free( m_pool, t, *m_dispatch );
    }

  private:
    OwnerType        m_owner    = OwnerType();
    PoolType         m_pool     = PoolType();
    Dispatch const * m_dispatch = nullptr;
  };

  using Bool32        = uint32_t;
  using DeviceAddress = uint64_t;
  using DeviceSize    = uint64_t;
  using SampleMask    = uint32_t;
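
  // The helpers below are used throughout the rest of the header: CppType maps an enum value to the
  // corresponding C++ type, isVulkanHandleType is specialized to true for every handle wrapper, and
  // toHexString formats values that do not match any known enumerant in the generated to_string()
  // functions.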

  template <typename EnumType, EnumType value>
  struct CppType
  {};

  template <typename Type>
  struct isVulkanHandleType
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false;
  };

  VULKAN_HPP_INLINE std::string toHexString( uint32_t value )
  {
    std::stringstream stream;
    stream << std::hex << value;
    return stream.str();
  }

  //=============
  //=== ENUMs ===
  //=============

  //=== VK_VERSION_1_0 ===
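
  // Result is the scoped counterpart of VkResult; in enhanced mode the generated wrappers inspect
  // these values and translate error codes into exceptions unless exceptions are disabled.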

  enum class Result
  {
    eSuccess                                     = VK_SUCCESS,
    eNotReady                                    = VK_NOT_READY,
    eTimeout                                     = VK_TIMEOUT,
    eEventSet                                    = VK_EVENT_SET,
    eEventReset                                  = VK_EVENT_RESET,
    eIncomplete                                  = VK_INCOMPLETE,
    eErrorOutOfHostMemory                        = VK_ERROR_OUT_OF_HOST_MEMORY,
    eErrorOutOfDeviceMemory                      = VK_ERROR_OUT_OF_DEVICE_MEMORY,
    eErrorInitializationFailed                   = VK_ERROR_INITIALIZATION_FAILED,
    eErrorDeviceLost                             = VK_ERROR_DEVICE_LOST,
    eErrorMemoryMapFailed                        = VK_ERROR_MEMORY_MAP_FAILED,
    eErrorLayerNotPresent                        = VK_ERROR_LAYER_NOT_PRESENT,
    eErrorExtensionNotPresent                    = VK_ERROR_EXTENSION_NOT_PRESENT,
    eErrorFeatureNotPresent                      = VK_ERROR_FEATURE_NOT_PRESENT,
    eErrorIncompatibleDriver                     = VK_ERROR_INCOMPATIBLE_DRIVER,
    eErrorTooManyObjects                         = VK_ERROR_TOO_MANY_OBJECTS,
    eErrorFormatNotSupported                     = VK_ERROR_FORMAT_NOT_SUPPORTED,
    eErrorFragmentedPool                         = VK_ERROR_FRAGMENTED_POOL,
    eErrorUnknown                                = VK_ERROR_UNKNOWN,
    eErrorOutOfPoolMemory                        = VK_ERROR_OUT_OF_POOL_MEMORY,
    eErrorInvalidExternalHandle                  = VK_ERROR_INVALID_EXTERNAL_HANDLE,
    eErrorFragmentation                          = VK_ERROR_FRAGMENTATION,
    eErrorInvalidOpaqueCaptureAddress            = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
    eErrorSurfaceLostKHR                         = VK_ERROR_SURFACE_LOST_KHR,
    eErrorNativeWindowInUseKHR                   = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
    eSuboptimalKHR                               = VK_SUBOPTIMAL_KHR,
    eErrorOutOfDateKHR                           = VK_ERROR_OUT_OF_DATE_KHR,
    eErrorIncompatibleDisplayKHR                 = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
    eErrorValidationFailedEXT                    = VK_ERROR_VALIDATION_FAILED_EXT,
    eErrorInvalidShaderNV                        = VK_ERROR_INVALID_SHADER_NV,
    eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
    eErrorNotPermittedEXT                        = VK_ERROR_NOT_PERMITTED_EXT,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    eThreadIdleKHR                       = VK_THREAD_IDLE_KHR,
    eThreadDoneKHR                       = VK_THREAD_DONE_KHR,
    eOperationDeferredKHR                = VK_OPERATION_DEFERRED_KHR,
    eOperationNotDeferredKHR             = VK_OPERATION_NOT_DEFERRED_KHR,
    ePipelineCompileRequiredEXT          = VK_PIPELINE_COMPILE_REQUIRED_EXT,
    eErrorFragmentationEXT               = VK_ERROR_FRAGMENTATION_EXT,
    eErrorInvalidDeviceAddressEXT        = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
    eErrorInvalidExternalHandleKHR       = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
    eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR,
    eErrorOutOfPoolMemoryKHR             = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
    eErrorPipelineCompileRequiredEXT     = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( Result value )
  {
    switch ( value )
    {
      case Result::eSuccess: return "Success";
      case Result::eNotReady: return "NotReady";
      case Result::eTimeout: return "Timeout";
      case Result::eEventSet: return "EventSet";
      case Result::eEventReset: return "EventReset";
      case Result::eIncomplete: return "Incomplete";
      case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory";
      case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory";
      case Result::eErrorInitializationFailed: return "ErrorInitializationFailed";
      case Result::eErrorDeviceLost: return "ErrorDeviceLost";
      case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed";
      case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent";
      case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent";
      case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent";
      case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver";
      case Result::eErrorTooManyObjects: return "ErrorTooManyObjects";
      case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported";
      case Result::eErrorFragmentedPool: return "ErrorFragmentedPool";
      case Result::eErrorUnknown: return "ErrorUnknown";
      case Result::eErrorOutOfPoolMemory: return "ErrorOutOfPoolMemory";
      case Result::eErrorInvalidExternalHandle: return "ErrorInvalidExternalHandle";
      case Result::eErrorFragmentation: return "ErrorFragmentation";
      case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress";
      case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR";
      case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR";
      case Result::eSuboptimalKHR: return "SuboptimalKHR";
      case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR";
      case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR";
      case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT";
      case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV";
      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
      case Result::eErrorNotPermittedEXT: return "ErrorNotPermittedEXT";
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case Result::eThreadIdleKHR: return "ThreadIdleKHR";
      case Result::eThreadDoneKHR: return "ThreadDoneKHR";
      case Result::eOperationDeferredKHR: return "OperationDeferredKHR";
      case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR";
      case Result::ePipelineCompileRequiredEXT: return "PipelineCompileRequiredEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
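
  // Usage sketch (illustrative only, not part of the generated interface; 'device' stands for a
  // VkDevice created by the application): a raw VkResult coming from a C call can be cast to the
  // scoped enum and turned into a readable name.
  //
  //   VkResult    raw    = vkDeviceWaitIdle( device );
  //   Result      result = static_cast<Result>( raw );
  //   std::string name   = to_string( result );   // "Success", "ErrorDeviceLost", ...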

  enum class StructureType
  {
    eApplicationInfo                           = VK_STRUCTURE_TYPE_APPLICATION_INFO,
    eInstanceCreateInfo                        = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
    eDeviceQueueCreateInfo                     = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
    eDeviceCreateInfo                          = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
    eSubmitInfo                                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
    eMemoryAllocateInfo                        = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
    eMappedMemoryRange                         = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
    eBindSparseInfo                            = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
    eFenceCreateInfo                           = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
    eSemaphoreCreateInfo                       = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
    eEventCreateInfo                           = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
    eQueryPoolCreateInfo                       = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
    eBufferCreateInfo                          = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
    eBufferViewCreateInfo                      = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
    eImageCreateInfo                           = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
    eImageViewCreateInfo                       = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
    eShaderModuleCreateInfo                    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
    ePipelineCacheCreateInfo                   = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
    ePipelineShaderStageCreateInfo             = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
    ePipelineVertexInputStateCreateInfo        = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
    ePipelineInputAssemblyStateCreateInfo      = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
    ePipelineTessellationStateCreateInfo       = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
    ePipelineViewportStateCreateInfo           = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
    ePipelineRasterizationStateCreateInfo      = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
    ePipelineMultisampleStateCreateInfo        = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
    ePipelineDepthStencilStateCreateInfo       = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
    ePipelineColorBlendStateCreateInfo         = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
    ePipelineDynamicStateCreateInfo            = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
    eGraphicsPipelineCreateInfo                = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
    eComputePipelineCreateInfo                 = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
    ePipelineLayoutCreateInfo                  = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
    eSamplerCreateInfo                         = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
    eDescriptorSetLayoutCreateInfo             = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
    eDescriptorPoolCreateInfo                  = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
    eDescriptorSetAllocateInfo                 = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
    eWriteDescriptorSet                        = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
    eCopyDescriptorSet                         = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
    eFramebufferCreateInfo                     = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
    eRenderPassCreateInfo                      = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
    eCommandPoolCreateInfo                     = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
    eCommandBufferAllocateInfo                 = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
    eCommandBufferInheritanceInfo              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
    eCommandBufferBeginInfo                    = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
    eRenderPassBeginInfo                       = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
    eBufferMemoryBarrier                       = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
    eImageMemoryBarrier                        = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
    eMemoryBarrier                             = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
    eLoaderInstanceCreateInfo                  = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
    eLoaderDeviceCreateInfo                    = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
    ePhysicalDeviceSubgroupProperties          = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
    eBindBufferMemoryInfo                      = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
    eBindImageMemoryInfo                       = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
    ePhysicalDevice16BitStorageFeatures        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
    eMemoryDedicatedRequirements               = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
    eMemoryDedicatedAllocateInfo               = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
    eMemoryAllocateFlagsInfo                   = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
    eDeviceGroupRenderPassBeginInfo            = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
    eDeviceGroupCommandBufferBeginInfo         = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
    eDeviceGroupSubmitInfo                     = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
    eDeviceGroupBindSparseInfo                 = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
    eBindBufferMemoryDeviceGroupInfo           = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
    eBindImageMemoryDeviceGroupInfo            = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
    ePhysicalDeviceGroupProperties             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
    eDeviceGroupDeviceCreateInfo               = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
    eBufferMemoryRequirementsInfo2             = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
    eImageMemoryRequirementsInfo2              = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
    eImageSparseMemoryRequirementsInfo2        = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
    eMemoryRequirements2                       = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
    eSparseImageMemoryRequirements2            = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
    ePhysicalDeviceFeatures2                   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
    ePhysicalDeviceProperties2                 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
    eFormatProperties2                         = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
    eImageFormatProperties2                    = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
    ePhysicalDeviceImageFormatInfo2            = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
    eQueueFamilyProperties2                    = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
    ePhysicalDeviceMemoryProperties2           = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
    eSparseImageFormatProperties2              = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
    ePhysicalDeviceSparseImageFormatInfo2      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
    ePhysicalDevicePointClippingProperties     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
    eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
    eImageViewUsageCreateInfo                  = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
    ePipelineTessellationDomainOriginStateCreateInfo =
      VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
    eRenderPassMultiviewCreateInfo                = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
    ePhysicalDeviceMultiviewFeatures              = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
    ePhysicalDeviceMultiviewProperties            = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
    ePhysicalDeviceVariablePointersFeatures       = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
    eProtectedSubmitInfo                          = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
    ePhysicalDeviceProtectedMemoryFeatures        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
    ePhysicalDeviceProtectedMemoryProperties      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
    eDeviceQueueInfo2                             = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
    eSamplerYcbcrConversionCreateInfo             = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
    eSamplerYcbcrConversionInfo                   = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
    eBindImagePlaneMemoryInfo                     = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
    eImagePlaneMemoryRequirementsInfo             = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
    ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
    eSamplerYcbcrConversionImageFormatProperties  = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
    eDescriptorUpdateTemplateCreateInfo           = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
    ePhysicalDeviceExternalImageFormatInfo        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
    eExternalImageFormatProperties                = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
    ePhysicalDeviceExternalBufferInfo             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
    eExternalBufferProperties                     = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
    ePhysicalDeviceIdProperties                   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
    eExternalMemoryBufferCreateInfo               = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
    eExternalMemoryImageCreateInfo                = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
    eExportMemoryAllocateInfo                     = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
    ePhysicalDeviceExternalFenceInfo              = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
    eExternalFenceProperties                      = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
    eExportFenceCreateInfo                        = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
    eExportSemaphoreCreateInfo                    = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
    ePhysicalDeviceExternalSemaphoreInfo          = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
    eExternalSemaphoreProperties                  = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
    ePhysicalDeviceMaintenance3Properties         = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
    eDescriptorSetLayoutSupport                   = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
    ePhysicalDeviceShaderDrawParametersFeatures   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
    ePhysicalDeviceVulkan11Features               = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
    ePhysicalDeviceVulkan11Properties             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
    ePhysicalDeviceVulkan12Features               = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
    ePhysicalDeviceVulkan12Properties             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
    eImageFormatListCreateInfo                    = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
    eAttachmentDescription2                       = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
    eAttachmentReference2                         = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
    eSubpassDescription2                          = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
    eSubpassDependency2                           = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
    eRenderPassCreateInfo2                        = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
    eSubpassBeginInfo                             = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
    eSubpassEndInfo                               = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
    ePhysicalDevice8BitStorageFeatures            = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
    ePhysicalDeviceDriverProperties               = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
    ePhysicalDeviceShaderAtomicInt64Features      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
    ePhysicalDeviceShaderFloat16Int8Features      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
    ePhysicalDeviceFloatControlsProperties        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
    eDescriptorSetLayoutBindingFlagsCreateInfo    = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
    ePhysicalDeviceDescriptorIndexingFeatures     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
    ePhysicalDeviceDescriptorIndexingProperties   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
    eDescriptorSetVariableDescriptorCountAllocateInfo =
      VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
    eDescriptorSetVariableDescriptorCountLayoutSupport =
      VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
    ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
    eSubpassDescriptionDepthStencilResolve       = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
    ePhysicalDeviceScalarBlockLayoutFeatures     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
    eImageStencilUsageCreateInfo                 = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
    ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
    eSamplerReductionModeCreateInfo              = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
    ePhysicalDeviceVulkanMemoryModelFeatures     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
    ePhysicalDeviceImagelessFramebufferFeatures  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
    eFramebufferAttachmentsCreateInfo            = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
    eFramebufferAttachmentImageInfo              = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
    eRenderPassAttachmentBeginInfo               = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
    ePhysicalDeviceUniformBufferStandardLayoutFeatures =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
    ePhysicalDeviceShaderSubgroupExtendedTypesFeatures =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
    ePhysicalDeviceSeparateDepthStencilLayoutsFeatures =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
    eAttachmentReferenceStencilLayout          = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
    eAttachmentDescriptionStencilLayout        = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
    ePhysicalDeviceHostQueryResetFeatures      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
    ePhysicalDeviceTimelineSemaphoreFeatures   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
    ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
    eSemaphoreTypeCreateInfo                   = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
    eTimelineSemaphoreSubmitInfo               = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
    eSemaphoreWaitInfo                         = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
    eSemaphoreSignalInfo                       = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
    ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
    eBufferDeviceAddressInfo                   = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
    eBufferOpaqueCaptureAddressCreateInfo      = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
    eMemoryOpaqueCaptureAddressAllocateInfo    = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
    eDeviceMemoryOpaqueCaptureAddressInfo      = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
    eSwapchainCreateInfoKHR                    = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
    ePresentInfoKHR                            = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
    eDeviceGroupPresentCapabilitiesKHR         = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
    eImageSwapchainCreateInfoKHR               = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
    eBindImageMemorySwapchainInfoKHR           = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
    eAcquireNextImageInfoKHR                   = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
    eDeviceGroupPresentInfoKHR                 = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
    eDeviceGroupSwapchainCreateInfoKHR         = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
    eDisplayModeCreateInfoKHR                  = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
    eDisplaySurfaceCreateInfoKHR               = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
    eDisplayPresentInfoKHR                     = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
#if defined( VK_USE_PLATFORM_XLIB_KHR )
    eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
    eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
    eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
    ePipelineRasterizationStateRasterizationOrderAMD =
      VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
    eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
    eDebugMarkerObjectTagInfoEXT  = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
    eDebugMarkerMarkerInfoEXT     = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoProfileKHR                     = VK_STRUCTURE_TYPE_VIDEO_PROFILE_KHR,
    eVideoCapabilitiesKHR                = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR,
    eVideoPictureResourceKHR             = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_KHR,
    eVideoGetMemoryPropertiesKHR         = VK_STRUCTURE_TYPE_VIDEO_GET_MEMORY_PROPERTIES_KHR,
    eVideoBindMemoryKHR                  = VK_STRUCTURE_TYPE_VIDEO_BIND_MEMORY_KHR,
    eVideoSessionCreateInfoKHR           = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR,
    eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR,
    eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR,
    eVideoBeginCodingInfoKHR             = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR,
    eVideoEndCodingInfoKHR               = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR,
    eVideoCodingControlInfoKHR           = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR,
    eVideoReferenceSlotKHR               = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_KHR,
    eVideoQueueFamilyProperties2KHR      = VK_STRUCTURE_TYPE_VIDEO_QUEUE_FAMILY_PROPERTIES_2_KHR,
    eVideoProfilesKHR                    = VK_STRUCTURE_TYPE_VIDEO_PROFILES_KHR,
    ePhysicalDeviceVideoFormatInfoKHR    = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR,
    eVideoFormatPropertiesKHR            = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR,
    eVideoDecodeInfoKHR                  = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eDedicatedAllocationImageCreateInfoNV         = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
    eDedicatedAllocationBufferCreateInfoNV        = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
    eDedicatedAllocationMemoryAllocateInfoNV      = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
    ePhysicalDeviceTransformFeedbackFeaturesEXT   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
    ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
    ePipelineRasterizationStateStreamCreateInfoEXT =
      VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
    eCuModuleCreateInfoNVX         = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX,
    eCuFunctionCreateInfoNVX       = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX,
    eCuLaunchInfoNVX               = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX,
    eImageViewHandleInfoNVX        = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
    eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoEncodeH264CapabilitiesEXT      = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT,
    eVideoEncodeH264SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_EXT,
    eVideoEncodeH264SessionParametersCreateInfoEXT =
      VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT,
    eVideoEncodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT,
    eVideoEncodeH264VclFrameInfoEXT             = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT,
    eVideoEncodeH264DpbSlotInfoEXT              = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT,
    eVideoEncodeH264NaluSliceEXT                = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_EXT,
    eVideoEncodeH264EmitPictureParametersEXT    = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_EXT,
    eVideoEncodeH264ProfileEXT                  = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_EXT,
    eVideoDecodeH264CapabilitiesEXT             = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT,
    eVideoDecodeH264SessionCreateInfoEXT        = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_CREATE_INFO_EXT,
    eVideoDecodeH264PictureInfoEXT              = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT,
    eVideoDecodeH264MvcEXT                      = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_EXT,
    eVideoDecodeH264ProfileEXT                  = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_EXT,
    eVideoDecodeH264SessionParametersCreateInfoEXT =
      VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT,
    eVideoDecodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT,
    eVideoDecodeH264DpbSlotInfoEXT              = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
#if defined( VK_USE_PLATFORM_GGP )
    eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
#endif /*VK_USE_PLATFORM_GGP*/
    ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
    eExternalMemoryImageCreateInfoNV            = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
    eExportMemoryAllocateInfoNV                 = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    eImportMemoryWin32HandleInfoNV       = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
    eExportMemoryWin32HandleInfoNV       = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
    eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
#if defined( VK_USE_PLATFORM_VI_NN )
    eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
#endif /*VK_USE_PLATFORM_VI_NN*/
    ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
    eImageViewAstcDecodeModeEXT          = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
    ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
    eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
    eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
    eMemoryGetWin32HandleInfoKHR    = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
    eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
    eMemoryGetFdInfoKHR    = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
    eImportSemaphoreWin32HandleInfoKHR    = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
    eExportSemaphoreWin32HandleInfoKHR    = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
    eD3D12FenceSubmitInfoKHR              = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
    eSemaphoreGetWin32HandleInfoKHR       = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    eImportSemaphoreFdInfoKHR                  = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
    eSemaphoreGetFdInfoKHR                     = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
    ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
    eCommandBufferInheritanceConditionalRenderingInfoEXT =
      VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
    ePhysicalDeviceConditionalRenderingFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
    eConditionalRenderingBeginInfoEXT          = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
    ePresentRegionsKHR                         = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
    ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
    eSurfaceCapabilities2EXT                   = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
    eDisplayPowerInfoEXT                       = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
    eDeviceEventInfoEXT                        = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
    eDisplayEventInfoEXT                       = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
    eSwapchainCounterCreateInfoEXT             = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
    ePresentTimesInfoGOOGLE                    = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
    ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
    ePipelineViewportSwizzleStateCreateInfoNV    = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
    ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
    ePipelineDiscardRectangleStateCreateInfoEXT  = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
    ePhysicalDeviceConservativeRasterizationPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
    ePipelineRasterizationConservativeStateCreateInfoEXT =
      VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
    ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
    ePipelineRasterizationDepthClipStateCreateInfoEXT =
      VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
    eHdrMetadataEXT                      = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
    eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
    eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
    eFenceGetWin32HandleInfoKHR    = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    eImportFenceFdInfoKHR                        = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
    eFenceGetFdInfoKHR                           = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
    ePhysicalDevicePerformanceQueryFeaturesKHR   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR,
    ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR,
    eQueryPoolPerformanceCreateInfoKHR           = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR,
    ePerformanceQuerySubmitInfoKHR               = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR,
    eAcquireProfilingLockInfoKHR                 = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
    ePerformanceCounterKHR                       = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR,
    ePerformanceCounterDescriptionKHR            = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR,
    ePhysicalDeviceSurfaceInfo2KHR               = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
    eSurfaceCapabilities2KHR                     = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
    eSurfaceFormat2KHR                           = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
    eDisplayProperties2KHR                       = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR,
    eDisplayPlaneProperties2KHR                  = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR,
    eDisplayModeProperties2KHR                   = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR,
    eDisplayPlaneInfo2KHR                        = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR,
    eDisplayPlaneCapabilities2KHR                = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR,
#if defined( VK_USE_PLATFORM_IOS_MVK )
    eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_MACOS_MVK )
    eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
    eDebugUtilsObjectNameInfoEXT        = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
    eDebugUtilsObjectTagInfoEXT         = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT,
    eDebugUtilsLabelEXT                 = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
    eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
    eDebugUtilsMessengerCreateInfoEXT   = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    eAndroidHardwareBufferUsageANDROID            = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
    eAndroidHardwareBufferPropertiesANDROID       = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
    eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
    eImportAndroidHardwareBufferInfoANDROID       = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
    eMemoryGetAndroidHardwareBufferInfoANDROID    = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
    eExternalFormatANDROID                        = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
    ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
    ePhysicalDeviceInlineUniformBlockPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
    eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
    eDescriptorPoolInlineUniformBlockCreateInfoEXT =
      VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
    eSampleLocationsInfoEXT                     = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
    eRenderPassSampleLocationsBeginInfoEXT      = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
    ePipelineSampleLocationsStateCreateInfoEXT  = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
    ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
    eMultisamplePropertiesEXT                   = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
    ePhysicalDeviceBlendOperationAdvancedFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
    ePhysicalDeviceBlendOperationAdvancedPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
    ePipelineColorBlendAdvancedStateCreateInfoEXT =
      VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
    ePipelineCoverageToColorStateCreateInfoNV   = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
    eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
    eAccelerationStructureBuildGeometryInfoKHR  = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
    eAccelerationStructureDeviceAddressInfoKHR  = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
    eAccelerationStructureGeometryAabbsDataKHR  = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR,
    eAccelerationStructureGeometryInstancesDataKHR =
      VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
    eAccelerationStructureGeometryTrianglesDataKHR =
      VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
    eAccelerationStructureGeometryKHR         = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
    eAccelerationStructureVersionInfoKHR      = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR,
    eCopyAccelerationStructureInfoKHR         = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR,
    eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
    eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
    ePhysicalDeviceAccelerationStructureFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR,
    ePhysicalDeviceAccelerationStructurePropertiesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR,
    eAccelerationStructureCreateInfoKHR          = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
    eAccelerationStructureBuildSizesInfoKHR      = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR,
    ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR,
    ePhysicalDeviceRayTracingPipelinePropertiesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR,
    eRayTracingPipelineCreateInfoKHR             = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR,
    eRayTracingShaderGroupCreateInfoKHR          = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR,
    eRayTracingPipelineInterfaceCreateInfoKHR    = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR,
    ePhysicalDeviceRayQueryFeaturesKHR           = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR,
    ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
    ePhysicalDeviceShaderSmBuiltinsFeaturesNV    = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
    ePhysicalDeviceShaderSmBuiltinsPropertiesNV  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
    eDrmFormatModifierPropertiesListEXT          = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
    ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
    eImageDrmFormatModifierListCreateInfoEXT     = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
    eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
    eImageDrmFormatModifierPropertiesEXT         = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
    eValidationCacheCreateInfoEXT                = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
    eShaderModuleValidationCacheCreateInfoEXT    = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    ePhysicalDevicePortabilitySubsetFeaturesKHR   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR,
    ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    ePipelineViewportShadingRateImageStateCreateInfoNV =
      VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
    ePhysicalDeviceShadingRateImageFeaturesNV   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
    ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
    ePipelineViewportCoarseSampleOrderStateCreateInfoNV =
      VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
    eRayTracingPipelineCreateInfoNV            = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV,
    eAccelerationStructureCreateInfoNV         = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
    eGeometryNV                                = VK_STRUCTURE_TYPE_GEOMETRY_NV,
    eGeometryTrianglesNV                       = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
    eGeometryAabbNV                            = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
    eBindAccelerationStructureMemoryInfoNV     = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
    eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
    eAccelerationStructureMemoryRequirementsInfoNV =
      VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
    ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
    eRayTracingShaderGroupCreateInfoNV    = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
    eAccelerationStructureInfoNV          = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
    ePhysicalDeviceRepresentativeFragmentTestFeaturesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
    ePipelineRepresentativeFragmentTestStateCreateInfoNV =
      VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
    ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
    eFilterCubicImageViewImageFormatPropertiesEXT =
      VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
    eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
    eImportMemoryHostPointerInfoEXT         = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
    eMemoryHostPointerPropertiesEXT         = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
    ePhysicalDeviceExternalMemoryHostPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
    ePhysicalDeviceShaderClockFeaturesKHR  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
    ePipelineCompilerControlCreateInfoAMD  = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
    eCalibratedTimestampInfoEXT            = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
    ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoDecodeH265CapabilitiesEXT      = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT,
    eVideoDecodeH265SessionCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_CREATE_INFO_EXT,
    eVideoDecodeH265SessionParametersCreateInfoEXT =
      VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT,
    eVideoDecodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT,
    eVideoDecodeH265ProfileEXT                  = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_EXT,
    eVideoDecodeH265PictureInfoEXT              = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT,
    eVideoDecodeH265DpbSlotInfoEXT              = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
    ePhysicalDeviceVertexAttributeDivisorPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
    ePipelineVertexInputDivisorStateCreateInfoEXT =
      VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
    ePhysicalDeviceVertexAttributeDivisorFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
#if defined( VK_USE_PLATFORM_GGP )
    ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
#endif /*VK_USE_PLATFORM_GGP*/
    ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
    ePhysicalDeviceComputeShaderDerivativesFeaturesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
    ePhysicalDeviceMeshShaderFeaturesNV   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
    ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
    ePhysicalDeviceFragmentShaderBarycentricFeaturesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
    ePhysicalDeviceShaderImageFootprintFeaturesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
    ePipelineViewportExclusiveScissorStateCreateInfoNV =
      VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
    ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
    eCheckpointDataNV                         = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
    eQueueFamilyCheckpointPropertiesNV        = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
    ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
    eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
    eInitializePerformanceApiInfoINTEL        = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
    ePerformanceMarkerInfoINTEL               = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
    ePerformanceStreamMarkerInfoINTEL         = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
    ePerformanceOverrideInfoINTEL             = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
    ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
    ePhysicalDevicePciBusInfoPropertiesEXT    = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
    eDisplayNativeHdrSurfaceCapabilitiesAMD   = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
    eSwapchainDisplayNativeHdrCreateInfoAMD   = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
#if defined( VK_USE_PLATFORM_FUCHSIA )
    eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    ePhysicalDeviceShaderTerminateInvocationFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR,
#if defined( VK_USE_PLATFORM_METAL_EXT )
    eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT,
#endif /*VK_USE_PLATFORM_METAL_EXT*/
    ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
    ePhysicalDeviceFragmentDensityMapPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
    eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
    ePhysicalDeviceSubgroupSizeControlPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
    ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT =
      VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
    ePhysicalDeviceSubgroupSizeControlFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
    eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
    ePipelineFragmentShadingRateStateCreateInfoKHR =
      VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR,
    ePhysicalDeviceFragmentShadingRatePropertiesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR,
    ePhysicalDeviceFragmentShadingRateFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR,
    ePhysicalDeviceFragmentShadingRateKHR    = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR,
    ePhysicalDeviceShaderCoreProperties2AMD  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
    ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
    ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT,
    ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
    ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
    eMemoryPriorityAllocateInfoEXT           = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
    eSurfaceProtectedCapabilitiesKHR         = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
    ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
    ePhysicalDeviceBufferDeviceAddressFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
    eBufferDeviceAddressCreateInfoEXT            = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
    ePhysicalDeviceToolPropertiesEXT             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
    eValidationFeaturesEXT                       = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
    ePhysicalDeviceCooperativeMatrixFeaturesNV   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
    eCooperativeMatrixPropertiesNV               = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
    ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV,
    ePhysicalDeviceCoverageReductionModeFeaturesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV,
    ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV,
    eFramebufferMixedSamplesCombinationNV       = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
    ePhysicalDeviceFragmentShaderInterlockFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
    ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
    ePhysicalDeviceProvokingVertexFeaturesEXT  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT,
    ePipelineRasterizationProvokingVertexStateCreateInfoEXT =
      VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT,
    ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT,
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    eSurfaceFullScreenExclusiveInfoEXT         = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
    eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT,
    eSurfaceFullScreenExclusiveWin32InfoEXT    = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    eHeadlessSurfaceCreateInfoEXT                 = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT,
    ePhysicalDeviceLineRasterizationFeaturesEXT   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
    ePipelineRasterizationLineStateCreateInfoEXT  = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
    ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
    ePhysicalDeviceShaderAtomicFloatFeaturesEXT   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT,
    ePhysicalDeviceIndexTypeUint8FeaturesEXT      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
    ePhysicalDeviceExtendedDynamicStateFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
    ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
    ePipelineInfoKHR                             = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
    ePipelineExecutablePropertiesKHR             = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
    ePipelineExecutableInfoKHR                   = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
    ePipelineExecutableStatisticKHR              = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
    ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
    ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
    ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV,
    eGraphicsShaderGroupCreateInfoNV           = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV,
    eGraphicsPipelineShaderGroupsCreateInfoNV  = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV,
    eIndirectCommandsLayoutTokenNV             = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV,
    eIndirectCommandsLayoutCreateInfoNV        = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV,
    eGeneratedCommandsInfoNV                   = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
    eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
    ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV,
    ePhysicalDeviceInheritedViewportScissorFeaturesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV,
    eCommandBufferInheritanceViewportScissorInfoNV =
      VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV,
    ePhysicalDeviceTexelBufferAlignmentFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
    ePhysicalDeviceTexelBufferAlignmentPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
    eCommandBufferInheritanceRenderPassTransformInfoQCOM =
      VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM,
    eRenderPassTransformBeginInfoQCOM            = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM,
    ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT,
    eDeviceDeviceMemoryReportCreateInfoEXT       = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT,
    eDeviceMemoryReportCallbackDataEXT           = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT,
    ePhysicalDeviceRobustness2FeaturesEXT        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
    ePhysicalDeviceRobustness2PropertiesEXT      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT,
    eSamplerCustomBorderColorCreateInfoEXT       = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
    ePhysicalDeviceCustomBorderColorPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
    ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
    ePipelineLibraryCreateInfoKHR               = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
    ePhysicalDevicePrivateDataFeaturesEXT       = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT,
    eDevicePrivateDataCreateInfoEXT             = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT,
    ePrivateDataSlotCreateInfoEXT               = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
    ePhysicalDevicePipelineCreationCacheControlFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoEncodeInfoKHR            = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR,
    eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
    eDeviceDiagnosticsConfigCreateInfoNV       = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
    eMemoryBarrier2KHR                         = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
    eBufferMemoryBarrier2KHR                   = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR,
    eImageMemoryBarrier2KHR                    = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR,
    eDependencyInfoKHR                         = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
    eSubmitInfo2KHR                            = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR,
    eSemaphoreSubmitInfoKHR                    = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
    eCommandBufferSubmitInfoKHR                = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
    ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR,
    eQueueFamilyCheckpointProperties2NV        = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV,
    eCheckpointData2NV                         = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV,
    ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR,
    ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR,
    ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV,
    ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV,
    ePipelineFragmentShadingRateEnumStateCreateInfoNV =
      VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV,
    ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT,
    ePhysicalDeviceFragmentDensityMap2FeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT,
    ePhysicalDeviceFragmentDensityMap2PropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT,
    eCopyCommandTransformInfoQCOM             = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM,
    ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT,
    ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR,
    eCopyBufferInfo2KHR                   = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR,
    eCopyImageInfo2KHR                    = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR,
    eCopyBufferToImageInfo2KHR            = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR,
    eCopyImageToBufferInfo2KHR            = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR,
    eBlitImageInfo2KHR                    = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR,
    eResolveImageInfo2KHR                 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR,
    eBufferCopy2KHR                       = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,
    eImageCopy2KHR                        = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR,
    eImageBlit2KHR                        = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR,
    eBufferImageCopy2KHR                  = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR,
    eImageResolve2KHR                     = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR,
    ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT,
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
    eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT,
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
    ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE,
    eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE,
    ePhysicalDeviceVertexInputDynamicStateFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT,
    eVertexInputBindingDescription2EXT   = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT,
    eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
#if defined( VK_USE_PLATFORM_FUCHSIA )
    eImportMemoryZirconHandleInfoFUCHSIA    = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA,
    eMemoryZirconHandlePropertiesFUCHSIA    = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA,
    eMemoryGetZirconHandleInfoFUCHSIA       = VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA,
    eImportSemaphoreZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA,
    eSemaphoreGetZirconHandleInfoFUCHSIA    = VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    ePhysicalDeviceExtendedDynamicState2FeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT,
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
    eScreenSurfaceCreateInfoQNX = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX,
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
    ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT,
    ePipelineColorWriteCreateInfoEXT           = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT,
    ePhysicalDeviceGlobalPriorityQueryFeaturesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT,
    eQueueFamilyGlobalPriorityPropertiesEXT  = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT,
    eAttachmentDescription2KHR               = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
    eAttachmentDescriptionStencilLayoutKHR   = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
    eAttachmentReference2KHR                 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
    eAttachmentReferenceStencilLayoutKHR     = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
    eBindBufferMemoryDeviceGroupInfoKHR      = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
    eBindBufferMemoryInfoKHR                 = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
    eBindImageMemoryDeviceGroupInfoKHR       = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
    eBindImageMemoryInfoKHR                  = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
    eBindImagePlaneMemoryInfoKHR             = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
    eBufferDeviceAddressInfoEXT              = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
    eBufferDeviceAddressInfoKHR              = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
    eBufferMemoryRequirementsInfo2KHR        = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
    eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
    eDebugReportCreateInfoEXT                = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
    eDescriptorSetLayoutBindingFlagsCreateInfoEXT =
      VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
    eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
    eDescriptorSetVariableDescriptorCountAllocateInfoEXT =
      VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
    eDescriptorSetVariableDescriptorCountLayoutSupportEXT =
      VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
    eDescriptorUpdateTemplateCreateInfoKHR     = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
    eDeviceGroupBindSparseInfoKHR              = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
    eDeviceGroupCommandBufferBeginInfoKHR      = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
    eDeviceGroupDeviceCreateInfoKHR            = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
    eDeviceGroupRenderPassBeginInfoKHR         = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
    eDeviceGroupSubmitInfoKHR                  = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
    eDeviceMemoryOpaqueCaptureAddressInfoKHR   = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
    eExportFenceCreateInfoKHR                  = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
    eExportMemoryAllocateInfoKHR               = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
    eExportSemaphoreCreateInfoKHR              = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
    eExternalBufferPropertiesKHR               = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
    eExternalFencePropertiesKHR                = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
    eExternalImageFormatPropertiesKHR          = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
    eExternalMemoryBufferCreateInfoKHR         = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
    eExternalMemoryImageCreateInfoKHR          = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
    eExternalSemaphorePropertiesKHR            = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
    eFormatProperties2KHR                      = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
    eFramebufferAttachmentsCreateInfoKHR       = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
    eFramebufferAttachmentImageInfoKHR         = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
    eImageFormatListCreateInfoKHR              = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
    eImageFormatProperties2KHR                 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
    eImageMemoryRequirementsInfo2KHR           = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
    eImagePlaneMemoryRequirementsInfoKHR       = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
    eImageSparseMemoryRequirementsInfo2KHR     = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
    eImageStencilUsageCreateInfoEXT            = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
    eImageViewUsageCreateInfoKHR               = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
    eMemoryAllocateFlagsInfoKHR                = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
    eMemoryDedicatedAllocateInfoKHR            = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
    eMemoryDedicatedRequirementsKHR            = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
    eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
    eMemoryRequirements2KHR                    = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
    ePhysicalDevice16BitStorageFeaturesKHR     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
    ePhysicalDevice8BitStorageFeaturesKHR      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
    ePhysicalDeviceBufferAddressFeaturesEXT    = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
    ePhysicalDeviceBufferDeviceAddressFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
    ePhysicalDeviceDepthStencilResolvePropertiesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
    ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
    ePhysicalDeviceDescriptorIndexingPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
    ePhysicalDeviceDriverPropertiesKHR        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
    ePhysicalDeviceExternalBufferInfoKHR      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
    ePhysicalDeviceExternalFenceInfoKHR       = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
    ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
    ePhysicalDeviceExternalSemaphoreInfoKHR   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
    ePhysicalDeviceFeatures2KHR               = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
    ePhysicalDeviceFloat16Int8FeaturesKHR     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
    ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
    ePhysicalDeviceGroupPropertiesKHR         = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
    ePhysicalDeviceHostQueryResetFeaturesEXT  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
    ePhysicalDeviceIdPropertiesKHR            = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
    ePhysicalDeviceImagelessFramebufferFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
    ePhysicalDeviceImageFormatInfo2KHR        = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
    ePhysicalDeviceMaintenance3PropertiesKHR  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
    ePhysicalDeviceMemoryProperties2KHR       = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
    ePhysicalDeviceMultiviewFeaturesKHR       = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
    ePhysicalDeviceMultiviewPropertiesKHR     = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
    ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
    ePhysicalDeviceProperties2KHR             = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
    ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
    ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
    ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
    ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
    ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
    ePhysicalDeviceShaderDrawParameterFeatures  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
    ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
    ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
    ePhysicalDeviceSparseImageFormatInfo2KHR      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
    ePhysicalDeviceTimelineSemaphoreFeaturesKHR   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
    ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
    ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
    ePhysicalDeviceVariablePointersFeaturesKHR  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
    ePhysicalDeviceVariablePointerFeatures      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
    ePhysicalDeviceVariablePointerFeaturesKHR   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
    ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
    ePipelineTessellationDomainOriginStateCreateInfoKHR =
      VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
    eQueryPoolCreateInfoINTEL         = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
    eQueueFamilyProperties2KHR        = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
    eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
    eRenderPassCreateInfo2KHR         = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
    eRenderPassInputAttachmentAspectCreateInfoKHR =
      VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
    eRenderPassMultiviewCreateInfoKHR    = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
    eSamplerReductionModeCreateInfoEXT   = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
    eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
    eSamplerYcbcrConversionImageFormatPropertiesKHR =
      VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
    eSamplerYcbcrConversionInfoKHR            = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
    eSemaphoreSignalInfoKHR                   = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
    eSemaphoreTypeCreateInfoKHR               = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
    eSemaphoreWaitInfoKHR                     = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
    eSparseImageFormatProperties2KHR          = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
    eSparseImageMemoryRequirements2KHR        = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
    eSubpassBeginInfoKHR                      = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
    eSubpassDependency2KHR                    = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
    eSubpassDescription2KHR                   = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
    eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
    eSubpassEndInfoKHR                        = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
    eTimelineSemaphoreSubmitInfoKHR           = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR
  };

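  // Converts a StructureType enumerator to its unprefixed name as a std::string,
  // e.g. to_string( StructureType::eApplicationInfo ) returns "ApplicationInfo".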
  VULKAN_HPP_INLINE std::string to_string( StructureType value )
  {
    switch ( value )
    {
      case StructureType::eApplicationInfo: return "ApplicationInfo";
      case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
      case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
      case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
      case StructureType::eSubmitInfo: return "SubmitInfo";
      case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
      case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
      case StructureType::eBindSparseInfo: return "BindSparseInfo";
      case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
      case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
      case StructureType::eEventCreateInfo: return "EventCreateInfo";
      case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
      case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
      case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
      case StructureType::eImageCreateInfo: return "ImageCreateInfo";
      case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
      case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
      case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
      case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
      case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
      case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
      case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
      case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
      case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
      case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
      case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
      case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
      case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
      case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
      case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
      case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
      case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
      case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
      case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
      case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
      case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
      case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
      case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
      case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
      case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
      case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
      case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
      case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
      case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
      case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
      case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
      case StructureType::eMemoryBarrier: return "MemoryBarrier";
      case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
      case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
      case StructureType::ePhysicalDeviceSubgroupProperties: return "PhysicalDeviceSubgroupProperties";
      case StructureType::eBindBufferMemoryInfo: return "BindBufferMemoryInfo";
      case StructureType::eBindImageMemoryInfo: return "BindImageMemoryInfo";
      case StructureType::ePhysicalDevice16BitStorageFeatures: return "PhysicalDevice16BitStorageFeatures";
      case StructureType::eMemoryDedicatedRequirements: return "MemoryDedicatedRequirements";
      case StructureType::eMemoryDedicatedAllocateInfo: return "MemoryDedicatedAllocateInfo";
      case StructureType::eMemoryAllocateFlagsInfo: return "MemoryAllocateFlagsInfo";
      case StructureType::eDeviceGroupRenderPassBeginInfo: return "DeviceGroupRenderPassBeginInfo";
      case StructureType::eDeviceGroupCommandBufferBeginInfo: return "DeviceGroupCommandBufferBeginInfo";
      case StructureType::eDeviceGroupSubmitInfo: return "DeviceGroupSubmitInfo";
      case StructureType::eDeviceGroupBindSparseInfo: return "DeviceGroupBindSparseInfo";
      case StructureType::eBindBufferMemoryDeviceGroupInfo: return "BindBufferMemoryDeviceGroupInfo";
      case StructureType::eBindImageMemoryDeviceGroupInfo: return "BindImageMemoryDeviceGroupInfo";
      case StructureType::ePhysicalDeviceGroupProperties: return "PhysicalDeviceGroupProperties";
      case StructureType::eDeviceGroupDeviceCreateInfo: return "DeviceGroupDeviceCreateInfo";
      case StructureType::eBufferMemoryRequirementsInfo2: return "BufferMemoryRequirementsInfo2";
      case StructureType::eImageMemoryRequirementsInfo2: return "ImageMemoryRequirementsInfo2";
      case StructureType::eImageSparseMemoryRequirementsInfo2: return "ImageSparseMemoryRequirementsInfo2";
      case StructureType::eMemoryRequirements2: return "MemoryRequirements2";
      case StructureType::eSparseImageMemoryRequirements2: return "SparseImageMemoryRequirements2";
      case StructureType::ePhysicalDeviceFeatures2: return "PhysicalDeviceFeatures2";
      case StructureType::ePhysicalDeviceProperties2: return "PhysicalDeviceProperties2";
      case StructureType::eFormatProperties2: return "FormatProperties2";
      case StructureType::eImageFormatProperties2: return "ImageFormatProperties2";
      case StructureType::ePhysicalDeviceImageFormatInfo2: return "PhysicalDeviceImageFormatInfo2";
      case StructureType::eQueueFamilyProperties2: return "QueueFamilyProperties2";
      case StructureType::ePhysicalDeviceMemoryProperties2: return "PhysicalDeviceMemoryProperties2";
      case StructureType::eSparseImageFormatProperties2: return "SparseImageFormatProperties2";
      case StructureType::ePhysicalDeviceSparseImageFormatInfo2: return "PhysicalDeviceSparseImageFormatInfo2";
      case StructureType::ePhysicalDevicePointClippingProperties: return "PhysicalDevicePointClippingProperties";
      case StructureType::eRenderPassInputAttachmentAspectCreateInfo:
        return "RenderPassInputAttachmentAspectCreateInfo";
      case StructureType::eImageViewUsageCreateInfo: return "ImageViewUsageCreateInfo";
      case StructureType::ePipelineTessellationDomainOriginStateCreateInfo:
        return "PipelineTessellationDomainOriginStateCreateInfo";
      case StructureType::eRenderPassMultiviewCreateInfo: return "RenderPassMultiviewCreateInfo";
      case StructureType::ePhysicalDeviceMultiviewFeatures: return "PhysicalDeviceMultiviewFeatures";
      case StructureType::ePhysicalDeviceMultiviewProperties: return "PhysicalDeviceMultiviewProperties";
      case StructureType::ePhysicalDeviceVariablePointersFeatures: return "PhysicalDeviceVariablePointersFeatures";
      case StructureType::eProtectedSubmitInfo: return "ProtectedSubmitInfo";
      case StructureType::ePhysicalDeviceProtectedMemoryFeatures: return "PhysicalDeviceProtectedMemoryFeatures";
      case StructureType::ePhysicalDeviceProtectedMemoryProperties: return "PhysicalDeviceProtectedMemoryProperties";
      case StructureType::eDeviceQueueInfo2: return "DeviceQueueInfo2";
      case StructureType::eSamplerYcbcrConversionCreateInfo: return "SamplerYcbcrConversionCreateInfo";
      case StructureType::eSamplerYcbcrConversionInfo: return "SamplerYcbcrConversionInfo";
      case StructureType::eBindImagePlaneMemoryInfo: return "BindImagePlaneMemoryInfo";
      case StructureType::eImagePlaneMemoryRequirementsInfo: return "ImagePlaneMemoryRequirementsInfo";
      case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures:
        return "PhysicalDeviceSamplerYcbcrConversionFeatures";
      case StructureType::eSamplerYcbcrConversionImageFormatProperties:
        return "SamplerYcbcrConversionImageFormatProperties";
      case StructureType::eDescriptorUpdateTemplateCreateInfo: return "DescriptorUpdateTemplateCreateInfo";
      case StructureType::ePhysicalDeviceExternalImageFormatInfo: return "PhysicalDeviceExternalImageFormatInfo";
      case StructureType::eExternalImageFormatProperties: return "ExternalImageFormatProperties";
      case StructureType::ePhysicalDeviceExternalBufferInfo: return "PhysicalDeviceExternalBufferInfo";
      case StructureType::eExternalBufferProperties: return "ExternalBufferProperties";
      case StructureType::ePhysicalDeviceIdProperties: return "PhysicalDeviceIdProperties";
      case StructureType::eExternalMemoryBufferCreateInfo: return "ExternalMemoryBufferCreateInfo";
      case StructureType::eExternalMemoryImageCreateInfo: return "ExternalMemoryImageCreateInfo";
      case StructureType::eExportMemoryAllocateInfo: return "ExportMemoryAllocateInfo";
      case StructureType::ePhysicalDeviceExternalFenceInfo: return "PhysicalDeviceExternalFenceInfo";
      case StructureType::eExternalFenceProperties: return "ExternalFenceProperties";
      case StructureType::eExportFenceCreateInfo: return "ExportFenceCreateInfo";
      case StructureType::eExportSemaphoreCreateInfo: return "ExportSemaphoreCreateInfo";
      case StructureType::ePhysicalDeviceExternalSemaphoreInfo: return "PhysicalDeviceExternalSemaphoreInfo";
      case StructureType::eExternalSemaphoreProperties: return "ExternalSemaphoreProperties";
      case StructureType::ePhysicalDeviceMaintenance3Properties: return "PhysicalDeviceMaintenance3Properties";
      case StructureType::eDescriptorSetLayoutSupport: return "DescriptorSetLayoutSupport";
      case StructureType::ePhysicalDeviceShaderDrawParametersFeatures:
        return "PhysicalDeviceShaderDrawParametersFeatures";
      case StructureType::ePhysicalDeviceVulkan11Features: return "PhysicalDeviceVulkan11Features";
      case StructureType::ePhysicalDeviceVulkan11Properties: return "PhysicalDeviceVulkan11Properties";
      case StructureType::ePhysicalDeviceVulkan12Features: return "PhysicalDeviceVulkan12Features";
      case StructureType::ePhysicalDeviceVulkan12Properties: return "PhysicalDeviceVulkan12Properties";
      case StructureType::eImageFormatListCreateInfo: return "ImageFormatListCreateInfo";
      case StructureType::eAttachmentDescription2: return "AttachmentDescription2";
      case StructureType::eAttachmentReference2: return "AttachmentReference2";
      case StructureType::eSubpassDescription2: return "SubpassDescription2";
      case StructureType::eSubpassDependency2: return "SubpassDependency2";
      case StructureType::eRenderPassCreateInfo2: return "RenderPassCreateInfo2";
      case StructureType::eSubpassBeginInfo: return "SubpassBeginInfo";
      case StructureType::eSubpassEndInfo: return "SubpassEndInfo";
      case StructureType::ePhysicalDevice8BitStorageFeatures: return "PhysicalDevice8BitStorageFeatures";
      case StructureType::ePhysicalDeviceDriverProperties: return "PhysicalDeviceDriverProperties";
      case StructureType::ePhysicalDeviceShaderAtomicInt64Features: return "PhysicalDeviceShaderAtomicInt64Features";
      case StructureType::ePhysicalDeviceShaderFloat16Int8Features: return "PhysicalDeviceShaderFloat16Int8Features";
      case StructureType::ePhysicalDeviceFloatControlsProperties: return "PhysicalDeviceFloatControlsProperties";
      case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo:
        return "DescriptorSetLayoutBindingFlagsCreateInfo";
      case StructureType::ePhysicalDeviceDescriptorIndexingFeatures: return "PhysicalDeviceDescriptorIndexingFeatures";
      case StructureType::ePhysicalDeviceDescriptorIndexingProperties:
        return "PhysicalDeviceDescriptorIndexingProperties";
      case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo:
        return "DescriptorSetVariableDescriptorCountAllocateInfo";
      case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport:
        return "DescriptorSetVariableDescriptorCountLayoutSupport";
      case StructureType::ePhysicalDeviceDepthStencilResolveProperties:
        return "PhysicalDeviceDepthStencilResolveProperties";
      case StructureType::eSubpassDescriptionDepthStencilResolve: return "SubpassDescriptionDepthStencilResolve";
      case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures: return "PhysicalDeviceScalarBlockLayoutFeatures";
      case StructureType::eImageStencilUsageCreateInfo: return "ImageStencilUsageCreateInfo";
      case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties:
        return "PhysicalDeviceSamplerFilterMinmaxProperties";
      case StructureType::eSamplerReductionModeCreateInfo: return "SamplerReductionModeCreateInfo";
      case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures: return "PhysicalDeviceVulkanMemoryModelFeatures";
      case StructureType::ePhysicalDeviceImagelessFramebufferFeatures:
        return "PhysicalDeviceImagelessFramebufferFeatures";
      case StructureType::eFramebufferAttachmentsCreateInfo: return "FramebufferAttachmentsCreateInfo";
      case StructureType::eFramebufferAttachmentImageInfo: return "FramebufferAttachmentImageInfo";
      case StructureType::eRenderPassAttachmentBeginInfo: return "RenderPassAttachmentBeginInfo";
      case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures:
        return "PhysicalDeviceUniformBufferStandardLayoutFeatures";
      case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures:
        return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures";
      case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures:
        return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures";
      case StructureType::eAttachmentReferenceStencilLayout: return "AttachmentReferenceStencilLayout";
      case StructureType::eAttachmentDescriptionStencilLayout: return "AttachmentDescriptionStencilLayout";
      case StructureType::ePhysicalDeviceHostQueryResetFeatures: return "PhysicalDeviceHostQueryResetFeatures";
      case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures: return "PhysicalDeviceTimelineSemaphoreFeatures";
      case StructureType::ePhysicalDeviceTimelineSemaphoreProperties:
        return "PhysicalDeviceTimelineSemaphoreProperties";
      case StructureType::eSemaphoreTypeCreateInfo: return "SemaphoreTypeCreateInfo";
      case StructureType::eTimelineSemaphoreSubmitInfo: return "TimelineSemaphoreSubmitInfo";
      case StructureType::eSemaphoreWaitInfo: return "SemaphoreWaitInfo";
      case StructureType::eSemaphoreSignalInfo: return "SemaphoreSignalInfo";
      case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures:
        return "PhysicalDeviceBufferDeviceAddressFeatures";
      case StructureType::eBufferDeviceAddressInfo: return "BufferDeviceAddressInfo";
      case StructureType::eBufferOpaqueCaptureAddressCreateInfo: return "BufferOpaqueCaptureAddressCreateInfo";
      case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo: return "MemoryOpaqueCaptureAddressAllocateInfo";
      case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo: return "DeviceMemoryOpaqueCaptureAddressInfo";
      case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
      case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
      case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR";
      case StructureType::eImageSwapchainCreateInfoKHR: return "ImageSwapchainCreateInfoKHR";
      case StructureType::eBindImageMemorySwapchainInfoKHR: return "BindImageMemorySwapchainInfoKHR";
      case StructureType::eAcquireNextImageInfoKHR: return "AcquireNextImageInfoKHR";
      case StructureType::eDeviceGroupPresentInfoKHR: return "DeviceGroupPresentInfoKHR";
      case StructureType::eDeviceGroupSwapchainCreateInfoKHR: return "DeviceGroupSwapchainCreateInfoKHR";
      case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR";
      case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR";
      case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR";
#if defined( VK_USE_PLATFORM_XLIB_KHR )
      case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
      case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
      case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
      case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT";
      case StructureType::ePipelineRasterizationStateRasterizationOrderAMD:
        return "PipelineRasterizationStateRasterizationOrderAMD";
      case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT";
      case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT";
      case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case StructureType::eVideoProfileKHR: return "VideoProfileKHR";
      case StructureType::eVideoCapabilitiesKHR: return "VideoCapabilitiesKHR";
      case StructureType::eVideoPictureResourceKHR: return "VideoPictureResourceKHR";
      case StructureType::eVideoGetMemoryPropertiesKHR: return "VideoGetMemoryPropertiesKHR";
      case StructureType::eVideoBindMemoryKHR: return "VideoBindMemoryKHR";
      case StructureType::eVideoSessionCreateInfoKHR: return "VideoSessionCreateInfoKHR";
      case StructureType::eVideoSessionParametersCreateInfoKHR: return "VideoSessionParametersCreateInfoKHR";
      case StructureType::eVideoSessionParametersUpdateInfoKHR: return "VideoSessionParametersUpdateInfoKHR";
      case StructureType::eVideoBeginCodingInfoKHR: return "VideoBeginCodingInfoKHR";
      case StructureType::eVideoEndCodingInfoKHR: return "VideoEndCodingInfoKHR";
      case StructureType::eVideoCodingControlInfoKHR: return "VideoCodingControlInfoKHR";
      case StructureType::eVideoReferenceSlotKHR: return "VideoReferenceSlotKHR";
      case StructureType::eVideoQueueFamilyProperties2KHR: return "VideoQueueFamilyProperties2KHR";
      case StructureType::eVideoProfilesKHR: return "VideoProfilesKHR";
      case StructureType::ePhysicalDeviceVideoFormatInfoKHR: return "PhysicalDeviceVideoFormatInfoKHR";
      case StructureType::eVideoFormatPropertiesKHR: return "VideoFormatPropertiesKHR";
      case StructureType::eVideoDecodeInfoKHR: return "VideoDecodeInfoKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV";
      case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV";
      case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV";
      case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT:
        return "PhysicalDeviceTransformFeedbackFeaturesEXT";
      case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT:
        return "PhysicalDeviceTransformFeedbackPropertiesEXT";
      case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT:
        return "PipelineRasterizationStateStreamCreateInfoEXT";
      case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX";
      case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX";
      case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX";
      case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX";
      case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case StructureType::eVideoEncodeH264CapabilitiesEXT: return "VideoEncodeH264CapabilitiesEXT";
      case StructureType::eVideoEncodeH264SessionCreateInfoEXT: return "VideoEncodeH264SessionCreateInfoEXT";
      case StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT:
        return "VideoEncodeH264SessionParametersCreateInfoEXT";
      case StructureType::eVideoEncodeH264SessionParametersAddInfoEXT:
        return "VideoEncodeH264SessionParametersAddInfoEXT";
      case StructureType::eVideoEncodeH264VclFrameInfoEXT: return "VideoEncodeH264VclFrameInfoEXT";
      case StructureType::eVideoEncodeH264DpbSlotInfoEXT: return "VideoEncodeH264DpbSlotInfoEXT";
      case StructureType::eVideoEncodeH264NaluSliceEXT: return "VideoEncodeH264NaluSliceEXT";
      case StructureType::eVideoEncodeH264EmitPictureParametersEXT: return "VideoEncodeH264EmitPictureParametersEXT";
      case StructureType::eVideoEncodeH264ProfileEXT: return "VideoEncodeH264ProfileEXT";
      case StructureType::eVideoDecodeH264CapabilitiesEXT: return "VideoDecodeH264CapabilitiesEXT";
      case StructureType::eVideoDecodeH264SessionCreateInfoEXT: return "VideoDecodeH264SessionCreateInfoEXT";
      case StructureType::eVideoDecodeH264PictureInfoEXT: return "VideoDecodeH264PictureInfoEXT";
      case StructureType::eVideoDecodeH264MvcEXT: return "VideoDecodeH264MvcEXT";
      case StructureType::eVideoDecodeH264ProfileEXT: return "VideoDecodeH264ProfileEXT";
      case StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT:
        return "VideoDecodeH264SessionParametersCreateInfoEXT";
      case StructureType::eVideoDecodeH264SessionParametersAddInfoEXT:
        return "VideoDecodeH264SessionParametersAddInfoEXT";
      case StructureType::eVideoDecodeH264DpbSlotInfoEXT: return "VideoDecodeH264DpbSlotInfoEXT";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD";
#if defined( VK_USE_PLATFORM_GGP )
      case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP";
#endif /*VK_USE_PLATFORM_GGP*/
      case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV:
        return "PhysicalDeviceCornerSampledImageFeaturesNV";
      case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV";
      case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV";
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV";
      case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
      case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
#if defined( VK_USE_PLATFORM_VI_NN )
      case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN";
#endif /*VK_USE_PLATFORM_VI_NN*/
      case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT:
        return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT";
      case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT";
      case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT";
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR";
      case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR";
      case StructureType::eMemoryWin32HandlePropertiesKHR: return "MemoryWin32HandlePropertiesKHR";
      case StructureType::eMemoryGetWin32HandleInfoKHR: return "MemoryGetWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eImportMemoryFdInfoKHR: return "ImportMemoryFdInfoKHR";
      case StructureType::eMemoryFdPropertiesKHR: return "MemoryFdPropertiesKHR";
      case StructureType::eMemoryGetFdInfoKHR: return "MemoryGetFdInfoKHR";
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR: return "Win32KeyedMutexAcquireReleaseInfoKHR";
      case StructureType::eImportSemaphoreWin32HandleInfoKHR: return "ImportSemaphoreWin32HandleInfoKHR";
      case StructureType::eExportSemaphoreWin32HandleInfoKHR: return "ExportSemaphoreWin32HandleInfoKHR";
      case StructureType::eD3D12FenceSubmitInfoKHR: return "D3D12FenceSubmitInfoKHR";
      case StructureType::eSemaphoreGetWin32HandleInfoKHR: return "SemaphoreGetWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR";
      case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR";
      case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR:
        return "PhysicalDevicePushDescriptorPropertiesKHR";
      case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT:
        return "CommandBufferInheritanceConditionalRenderingInfoEXT";
      case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT:
        return "PhysicalDeviceConditionalRenderingFeaturesEXT";
      case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT";
      case StructureType::ePresentRegionsKHR: return "PresentRegionsKHR";
      case StructureType::ePipelineViewportWScalingStateCreateInfoNV:
        return "PipelineViewportWScalingStateCreateInfoNV";
      case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT";
      case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT";
      case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT";
      case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT";
      case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT";
      case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE";
      case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX:
        return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
      case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV";
      case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT:
        return "PhysicalDeviceDiscardRectanglePropertiesEXT";
      case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT:
        return "PipelineDiscardRectangleStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT:
        return "PhysicalDeviceConservativeRasterizationPropertiesEXT";
      case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT:
        return "PipelineRasterizationConservativeStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT";
      case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT:
        return "PipelineRasterizationDepthClipStateCreateInfoEXT";
      case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT";
      case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR";
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR";
      case StructureType::eExportFenceWin32HandleInfoKHR: return "ExportFenceWin32HandleInfoKHR";
      case StructureType::eFenceGetWin32HandleInfoKHR: return "FenceGetWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eImportFenceFdInfoKHR: return "ImportFenceFdInfoKHR";
      case StructureType::eFenceGetFdInfoKHR: return "FenceGetFdInfoKHR";
      case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR:
        return "PhysicalDevicePerformanceQueryFeaturesKHR";
      case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR:
        return "PhysicalDevicePerformanceQueryPropertiesKHR";
      case StructureType::eQueryPoolPerformanceCreateInfoKHR: return "QueryPoolPerformanceCreateInfoKHR";
      case StructureType::ePerformanceQuerySubmitInfoKHR: return "PerformanceQuerySubmitInfoKHR";
      case StructureType::eAcquireProfilingLockInfoKHR: return "AcquireProfilingLockInfoKHR";
      case StructureType::ePerformanceCounterKHR: return "PerformanceCounterKHR";
      case StructureType::ePerformanceCounterDescriptionKHR: return "PerformanceCounterDescriptionKHR";
      case StructureType::ePhysicalDeviceSurfaceInfo2KHR: return "PhysicalDeviceSurfaceInfo2KHR";
      case StructureType::eSurfaceCapabilities2KHR: return "SurfaceCapabilities2KHR";
      case StructureType::eSurfaceFormat2KHR: return "SurfaceFormat2KHR";
      case StructureType::eDisplayProperties2KHR: return "DisplayProperties2KHR";
      case StructureType::eDisplayPlaneProperties2KHR: return "DisplayPlaneProperties2KHR";
      case StructureType::eDisplayModeProperties2KHR: return "DisplayModeProperties2KHR";
      case StructureType::eDisplayPlaneInfo2KHR: return "DisplayPlaneInfo2KHR";
      case StructureType::eDisplayPlaneCapabilities2KHR: return "DisplayPlaneCapabilities2KHR";
#if defined( VK_USE_PLATFORM_IOS_MVK )
      case StructureType::eIosSurfaceCreateInfoMVK: return "IosSurfaceCreateInfoMVK";
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_MACOS_MVK )
      case StructureType::eMacosSurfaceCreateInfoMVK: return "MacosSurfaceCreateInfoMVK";
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
      case StructureType::eDebugUtilsObjectNameInfoEXT: return "DebugUtilsObjectNameInfoEXT";
      case StructureType::eDebugUtilsObjectTagInfoEXT: return "DebugUtilsObjectTagInfoEXT";
      case StructureType::eDebugUtilsLabelEXT: return "DebugUtilsLabelEXT";
      case StructureType::eDebugUtilsMessengerCallbackDataEXT: return "DebugUtilsMessengerCallbackDataEXT";
      case StructureType::eDebugUtilsMessengerCreateInfoEXT: return "DebugUtilsMessengerCreateInfoEXT";
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
      case StructureType::eAndroidHardwareBufferUsageANDROID: return "AndroidHardwareBufferUsageANDROID";
      case StructureType::eAndroidHardwareBufferPropertiesANDROID: return "AndroidHardwareBufferPropertiesANDROID";
      case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID:
        return "AndroidHardwareBufferFormatPropertiesANDROID";
      case StructureType::eImportAndroidHardwareBufferInfoANDROID: return "ImportAndroidHardwareBufferInfoANDROID";
      case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID:
        return "MemoryGetAndroidHardwareBufferInfoANDROID";
      case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT:
        return "PhysicalDeviceInlineUniformBlockFeaturesEXT";
      case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT:
        return "PhysicalDeviceInlineUniformBlockPropertiesEXT";
      case StructureType::eWriteDescriptorSetInlineUniformBlockEXT: return "WriteDescriptorSetInlineUniformBlockEXT";
      case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT:
        return "DescriptorPoolInlineUniformBlockCreateInfoEXT";
      case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT";
      case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT";
      case StructureType::ePipelineSampleLocationsStateCreateInfoEXT:
        return "PipelineSampleLocationsStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT:
        return "PhysicalDeviceSampleLocationsPropertiesEXT";
      case StructureType::eMultisamplePropertiesEXT: return "MultisamplePropertiesEXT";
      case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT:
        return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
      case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT:
        return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
      case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT:
        return "PipelineColorBlendAdvancedStateCreateInfoEXT";
      case StructureType::ePipelineCoverageToColorStateCreateInfoNV: return "PipelineCoverageToColorStateCreateInfoNV";
      case StructureType::eWriteDescriptorSetAccelerationStructureKHR:
        return "WriteDescriptorSetAccelerationStructureKHR";
      case StructureType::eAccelerationStructureBuildGeometryInfoKHR:
        return "AccelerationStructureBuildGeometryInfoKHR";
      case StructureType::eAccelerationStructureDeviceAddressInfoKHR:
        return "AccelerationStructureDeviceAddressInfoKHR";
      case StructureType::eAccelerationStructureGeometryAabbsDataKHR:
        return "AccelerationStructureGeometryAabbsDataKHR";
      case StructureType::eAccelerationStructureGeometryInstancesDataKHR:
        return "AccelerationStructureGeometryInstancesDataKHR";
      case StructureType::eAccelerationStructureGeometryTrianglesDataKHR:
        return "AccelerationStructureGeometryTrianglesDataKHR";
      case StructureType::eAccelerationStructureGeometryKHR: return "AccelerationStructureGeometryKHR";
      case StructureType::eAccelerationStructureVersionInfoKHR: return "AccelerationStructureVersionInfoKHR";
      case StructureType::eCopyAccelerationStructureInfoKHR: return "CopyAccelerationStructureInfoKHR";
      case StructureType::eCopyAccelerationStructureToMemoryInfoKHR: return "CopyAccelerationStructureToMemoryInfoKHR";
      case StructureType::eCopyMemoryToAccelerationStructureInfoKHR: return "CopyMemoryToAccelerationStructureInfoKHR";
      case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR:
        return "PhysicalDeviceAccelerationStructureFeaturesKHR";
      case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR:
        return "PhysicalDeviceAccelerationStructurePropertiesKHR";
      case StructureType::eAccelerationStructureCreateInfoKHR: return "AccelerationStructureCreateInfoKHR";
      case StructureType::eAccelerationStructureBuildSizesInfoKHR: return "AccelerationStructureBuildSizesInfoKHR";
      case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR:
        return "PhysicalDeviceRayTracingPipelineFeaturesKHR";
      case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR:
        return "PhysicalDeviceRayTracingPipelinePropertiesKHR";
      case StructureType::eRayTracingPipelineCreateInfoKHR: return "RayTracingPipelineCreateInfoKHR";
      case StructureType::eRayTracingShaderGroupCreateInfoKHR: return "RayTracingShaderGroupCreateInfoKHR";
      case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR: return "RayTracingPipelineInterfaceCreateInfoKHR";
      case StructureType::ePhysicalDeviceRayQueryFeaturesKHR: return "PhysicalDeviceRayQueryFeaturesKHR";
      case StructureType::ePipelineCoverageModulationStateCreateInfoNV:
        return "PipelineCoverageModulationStateCreateInfoNV";
      case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV: return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
      case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV:
        return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
      case StructureType::eDrmFormatModifierPropertiesListEXT: return "DrmFormatModifierPropertiesListEXT";
      case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT:
        return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
      case StructureType::eImageDrmFormatModifierListCreateInfoEXT: return "ImageDrmFormatModifierListCreateInfoEXT";
      case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT:
        return "ImageDrmFormatModifierExplicitCreateInfoEXT";
      case StructureType::eImageDrmFormatModifierPropertiesEXT: return "ImageDrmFormatModifierPropertiesEXT";
      case StructureType::eValidationCacheCreateInfoEXT: return "ValidationCacheCreateInfoEXT";
      case StructureType::eShaderModuleValidationCacheCreateInfoEXT: return "ShaderModuleValidationCacheCreateInfoEXT";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR:
        return "PhysicalDevicePortabilitySubsetFeaturesKHR";
      case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR:
        return "PhysicalDevicePortabilitySubsetPropertiesKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV:
        return "PipelineViewportShadingRateImageStateCreateInfoNV";
      case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV: return "PhysicalDeviceShadingRateImageFeaturesNV";
      case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV:
        return "PhysicalDeviceShadingRateImagePropertiesNV";
      case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV:
        return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
      case StructureType::eRayTracingPipelineCreateInfoNV: return "RayTracingPipelineCreateInfoNV";
      case StructureType::eAccelerationStructureCreateInfoNV: return "AccelerationStructureCreateInfoNV";
      case StructureType::eGeometryNV: return "GeometryNV";
      case StructureType::eGeometryTrianglesNV: return "GeometryTrianglesNV";
      case StructureType::eGeometryAabbNV: return "GeometryAabbNV";
      case StructureType::eBindAccelerationStructureMemoryInfoNV: return "BindAccelerationStructureMemoryInfoNV";
      case StructureType::eWriteDescriptorSetAccelerationStructureNV:
        return "WriteDescriptorSetAccelerationStructureNV";
      case StructureType::eAccelerationStructureMemoryRequirementsInfoNV:
        return "AccelerationStructureMemoryRequirementsInfoNV";
      case StructureType::ePhysicalDeviceRayTracingPropertiesNV: return "PhysicalDeviceRayTracingPropertiesNV";
      case StructureType::eRayTracingShaderGroupCreateInfoNV: return "RayTracingShaderGroupCreateInfoNV";
      case StructureType::eAccelerationStructureInfoNV: return "AccelerationStructureInfoNV";
      case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV:
        return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
      case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV:
        return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
      case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT:
        return "PhysicalDeviceImageViewImageFormatInfoEXT";
      case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT:
        return "FilterCubicImageViewImageFormatPropertiesEXT";
      case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT: return "DeviceQueueGlobalPriorityCreateInfoEXT";
      case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT";
      case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT";
      case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT:
        return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
      case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR";
      case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD";
      case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT";
      case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case StructureType::eVideoDecodeH265CapabilitiesEXT: return "VideoDecodeH265CapabilitiesEXT";
      case StructureType::eVideoDecodeH265SessionCreateInfoEXT: return "VideoDecodeH265SessionCreateInfoEXT";
      case StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT:
        return "VideoDecodeH265SessionParametersCreateInfoEXT";
      case StructureType::eVideoDecodeH265SessionParametersAddInfoEXT:
        return "VideoDecodeH265SessionParametersAddInfoEXT";
      case StructureType::eVideoDecodeH265ProfileEXT: return "VideoDecodeH265ProfileEXT";
      case StructureType::eVideoDecodeH265PictureInfoEXT: return "VideoDecodeH265PictureInfoEXT";
      case StructureType::eVideoDecodeH265DpbSlotInfoEXT: return "VideoDecodeH265DpbSlotInfoEXT";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD";
      case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT:
        return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
      case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT:
        return "PipelineVertexInputDivisorStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT:
        return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
#if defined( VK_USE_PLATFORM_GGP )
      case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP";
#endif /*VK_USE_PLATFORM_GGP*/
      case StructureType::ePipelineCreationFeedbackCreateInfoEXT: return "PipelineCreationFeedbackCreateInfoEXT";
      case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV:
        return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
      case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV";
      case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV";
      case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV:
        return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV";
      case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV:
        return "PhysicalDeviceShaderImageFootprintFeaturesNV";
      case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV:
        return "PipelineViewportExclusiveScissorStateCreateInfoNV";
      case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV";
      case StructureType::eCheckpointDataNV: return "CheckpointDataNV";
      case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV";
      case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL:
        return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
      case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL";
      case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL";
      case StructureType::ePerformanceMarkerInfoINTEL: return "PerformanceMarkerInfoINTEL";
      case StructureType::ePerformanceStreamMarkerInfoINTEL: return "PerformanceStreamMarkerInfoINTEL";
      case StructureType::ePerformanceOverrideInfoINTEL: return "PerformanceOverrideInfoINTEL";
      case StructureType::ePerformanceConfigurationAcquireInfoINTEL: return "PerformanceConfigurationAcquireInfoINTEL";
      case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT: return "PhysicalDevicePciBusInfoPropertiesEXT";
      case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD: return "DisplayNativeHdrSurfaceCapabilitiesAMD";
      case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD: return "SwapchainDisplayNativeHdrCreateInfoAMD";
#if defined( VK_USE_PLATFORM_FUCHSIA )
      case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA: return "ImagepipeSurfaceCreateInfoFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      case StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR:
        return "PhysicalDeviceShaderTerminateInvocationFeaturesKHR";
#if defined( VK_USE_PLATFORM_METAL_EXT )
      case StructureType::eMetalSurfaceCreateInfoEXT: return "MetalSurfaceCreateInfoEXT";
#endif /*VK_USE_PLATFORM_METAL_EXT*/
      case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT:
        return "PhysicalDeviceFragmentDensityMapFeaturesEXT";
      case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT:
        return "PhysicalDeviceFragmentDensityMapPropertiesEXT";
      case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT:
        return "RenderPassFragmentDensityMapCreateInfoEXT";
      case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT:
        return "PhysicalDeviceSubgroupSizeControlPropertiesEXT";
      case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT:
        return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT";
      case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT:
        return "PhysicalDeviceSubgroupSizeControlFeaturesEXT";
      case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR";
      case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR:
        return "PipelineFragmentShadingRateStateCreateInfoKHR";
      case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR:
        return "PhysicalDeviceFragmentShadingRatePropertiesKHR";
      case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR:
        return "PhysicalDeviceFragmentShadingRateFeaturesKHR";
      case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR";
      case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD";
      case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD";
      case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT:
        return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT";
      case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT";
      case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT";
      case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT";
      case StructureType::eSurfaceProtectedCapabilitiesKHR: return "SurfaceProtectedCapabilitiesKHR";
      case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV:
        return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
      case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT:
        return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
      case StructureType::eBufferDeviceAddressCreateInfoEXT: return "BufferDeviceAddressCreateInfoEXT";
      case StructureType::ePhysicalDeviceToolPropertiesEXT: return "PhysicalDeviceToolPropertiesEXT";
      case StructureType::eValidationFeaturesEXT: return "ValidationFeaturesEXT";
      case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV:
        return "PhysicalDeviceCooperativeMatrixFeaturesNV";
      case StructureType::eCooperativeMatrixPropertiesNV: return "CooperativeMatrixPropertiesNV";
      case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV:
        return "PhysicalDeviceCooperativeMatrixPropertiesNV";
      case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV:
        return "PhysicalDeviceCoverageReductionModeFeaturesNV";
      case StructureType::ePipelineCoverageReductionStateCreateInfoNV:
        return "PipelineCoverageReductionStateCreateInfoNV";
      case StructureType::eFramebufferMixedSamplesCombinationNV: return "FramebufferMixedSamplesCombinationNV";
      case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT:
        return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
      case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT:
        return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
      case StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT: return "PhysicalDeviceProvokingVertexFeaturesEXT";
      case StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT:
        return "PipelineRasterizationProvokingVertexStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT:
        return "PhysicalDeviceProvokingVertexPropertiesEXT";
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      case StructureType::eSurfaceFullScreenExclusiveInfoEXT: return "SurfaceFullScreenExclusiveInfoEXT";
      case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT:
        return "SurfaceCapabilitiesFullScreenExclusiveEXT";
      case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT";
      case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT:
        return "PhysicalDeviceLineRasterizationFeaturesEXT";
      case StructureType::ePipelineRasterizationLineStateCreateInfoEXT:
        return "PipelineRasterizationLineStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT:
        return "PhysicalDeviceLineRasterizationPropertiesEXT";
      case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT:
        return "PhysicalDeviceShaderAtomicFloatFeaturesEXT";
      case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT: return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
      case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT:
        return "PhysicalDeviceExtendedDynamicStateFeaturesEXT";
      case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR:
        return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
      case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR";
      case StructureType::ePipelineExecutablePropertiesKHR: return "PipelineExecutablePropertiesKHR";
      case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR";
      case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR";
      case StructureType::ePipelineExecutableInternalRepresentationKHR:
        return "PipelineExecutableInternalRepresentationKHR";
      case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT:
        return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT";
      case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV:
        return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV";
      case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV";
      case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV";
      case StructureType::eIndirectCommandsLayoutTokenNV: return "IndirectCommandsLayoutTokenNV";
      case StructureType::eIndirectCommandsLayoutCreateInfoNV: return "IndirectCommandsLayoutCreateInfoNV";
      case StructureType::eGeneratedCommandsInfoNV: return "GeneratedCommandsInfoNV";
      case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV:
        return "GeneratedCommandsMemoryRequirementsInfoNV";
      case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV:
        return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV";
      case StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV:
        return "PhysicalDeviceInheritedViewportScissorFeaturesNV";
      case StructureType::eCommandBufferInheritanceViewportScissorInfoNV:
        return "CommandBufferInheritanceViewportScissorInfoNV";
      case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT:
        return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
      case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT:
        return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT";
      case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM:
        return "CommandBufferInheritanceRenderPassTransformInfoQCOM";
      case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM";
      case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT:
        return "PhysicalDeviceDeviceMemoryReportFeaturesEXT";
      case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT: return "DeviceDeviceMemoryReportCreateInfoEXT";
      case StructureType::eDeviceMemoryReportCallbackDataEXT: return "DeviceMemoryReportCallbackDataEXT";
      case StructureType::ePhysicalDeviceRobustness2FeaturesEXT: return "PhysicalDeviceRobustness2FeaturesEXT";
      case StructureType::ePhysicalDeviceRobustness2PropertiesEXT: return "PhysicalDeviceRobustness2PropertiesEXT";
      case StructureType::eSamplerCustomBorderColorCreateInfoEXT: return "SamplerCustomBorderColorCreateInfoEXT";
      case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT:
        return "PhysicalDeviceCustomBorderColorPropertiesEXT";
      case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT:
        return "PhysicalDeviceCustomBorderColorFeaturesEXT";
      case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR";
      case StructureType::ePhysicalDevicePrivateDataFeaturesEXT: return "PhysicalDevicePrivateDataFeaturesEXT";
      case StructureType::eDevicePrivateDataCreateInfoEXT: return "DevicePrivateDataCreateInfoEXT";
      case StructureType::ePrivateDataSlotCreateInfoEXT: return "PrivateDataSlotCreateInfoEXT";
      case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT:
        return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case StructureType::eVideoEncodeInfoKHR: return "VideoEncodeInfoKHR";
      case StructureType::eVideoEncodeRateControlInfoKHR: return "VideoEncodeRateControlInfoKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV:
        return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
      case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV";
      case StructureType::eMemoryBarrier2KHR: return "MemoryBarrier2KHR";
      case StructureType::eBufferMemoryBarrier2KHR: return "BufferMemoryBarrier2KHR";
      case StructureType::eImageMemoryBarrier2KHR: return "ImageMemoryBarrier2KHR";
      case StructureType::eDependencyInfoKHR: return "DependencyInfoKHR";
      case StructureType::eSubmitInfo2KHR: return "SubmitInfo2KHR";
      case StructureType::eSemaphoreSubmitInfoKHR: return "SemaphoreSubmitInfoKHR";
      case StructureType::eCommandBufferSubmitInfoKHR: return "CommandBufferSubmitInfoKHR";
      case StructureType::ePhysicalDeviceSynchronization2FeaturesKHR:
        return "PhysicalDeviceSynchronization2FeaturesKHR";
      case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV";
      case StructureType::eCheckpointData2NV: return "CheckpointData2NV";
      case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR:
        return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR";
      case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR:
        return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR";
      case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV:
        return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV";
      case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV:
        return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV";
      case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV:
        return "PipelineFragmentShadingRateEnumStateCreateInfoNV";
      case StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT:
        return "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT";
      case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT:
        return "PhysicalDeviceFragmentDensityMap2FeaturesEXT";
      case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT:
        return "PhysicalDeviceFragmentDensityMap2PropertiesEXT";
      case StructureType::eCopyCommandTransformInfoQCOM: return "CopyCommandTransformInfoQCOM";
      case StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT: return "PhysicalDeviceImageRobustnessFeaturesEXT";
      case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR:
        return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR";
      case StructureType::eCopyBufferInfo2KHR: return "CopyBufferInfo2KHR";
      case StructureType::eCopyImageInfo2KHR: return "CopyImageInfo2KHR";
      case StructureType::eCopyBufferToImageInfo2KHR: return "CopyBufferToImageInfo2KHR";
      case StructureType::eCopyImageToBufferInfo2KHR: return "CopyImageToBufferInfo2KHR";
      case StructureType::eBlitImageInfo2KHR: return "BlitImageInfo2KHR";
      case StructureType::eResolveImageInfo2KHR: return "ResolveImageInfo2KHR";
      case StructureType::eBufferCopy2KHR: return "BufferCopy2KHR";
      case StructureType::eImageCopy2KHR: return "ImageCopy2KHR";
      case StructureType::eImageBlit2KHR: return "ImageBlit2KHR";
      case StructureType::eBufferImageCopy2KHR: return "BufferImageCopy2KHR";
      case StructureType::eImageResolve2KHR: return "ImageResolve2KHR";
      case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT";
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
      case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT";
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
      case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE:
        return "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE";
      case StructureType::eMutableDescriptorTypeCreateInfoVALVE: return "MutableDescriptorTypeCreateInfoVALVE";
      case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT:
        return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT";
      case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT";
      case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT";
#if defined( VK_USE_PLATFORM_FUCHSIA )
      case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA";
      case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA";
      case StructureType::eMemoryGetZirconHandleInfoFUCHSIA: return "MemoryGetZirconHandleInfoFUCHSIA";
      case StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA: return "ImportSemaphoreZirconHandleInfoFUCHSIA";
      case StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA: return "SemaphoreGetZirconHandleInfoFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      case StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT:
        return "PhysicalDeviceExtendedDynamicState2FeaturesEXT";
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
      case StructureType::eScreenSurfaceCreateInfoQNX: return "ScreenSurfaceCreateInfoQNX";
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
      case StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT:
        return "PhysicalDeviceColorWriteEnableFeaturesEXT";
      case StructureType::ePipelineColorWriteCreateInfoEXT: return "PipelineColorWriteCreateInfoEXT";
      case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT:
        return "PhysicalDeviceGlobalPriorityQueryFeaturesEXT";
      case StructureType::eQueueFamilyGlobalPriorityPropertiesEXT: return "QueueFamilyGlobalPriorityPropertiesEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

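  // ObjectType mirrors VkObjectType: it identifies which kind of Vulkan handle an
  // otherwise untyped object reference denotes, e.g. when naming or tagging objects
  // through VK_EXT_debug_utils or when interpreting debug-messenger callback data.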
  enum class ObjectType
  {
    eUnknown                  = VK_OBJECT_TYPE_UNKNOWN,
    eInstance                 = VK_OBJECT_TYPE_INSTANCE,
    ePhysicalDevice           = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
    eDevice                   = VK_OBJECT_TYPE_DEVICE,
    eQueue                    = VK_OBJECT_TYPE_QUEUE,
    eSemaphore                = VK_OBJECT_TYPE_SEMAPHORE,
    eCommandBuffer            = VK_OBJECT_TYPE_COMMAND_BUFFER,
    eFence                    = VK_OBJECT_TYPE_FENCE,
    eDeviceMemory             = VK_OBJECT_TYPE_DEVICE_MEMORY,
    eBuffer                   = VK_OBJECT_TYPE_BUFFER,
    eImage                    = VK_OBJECT_TYPE_IMAGE,
    eEvent                    = VK_OBJECT_TYPE_EVENT,
    eQueryPool                = VK_OBJECT_TYPE_QUERY_POOL,
    eBufferView               = VK_OBJECT_TYPE_BUFFER_VIEW,
    eImageView                = VK_OBJECT_TYPE_IMAGE_VIEW,
    eShaderModule             = VK_OBJECT_TYPE_SHADER_MODULE,
    ePipelineCache            = VK_OBJECT_TYPE_PIPELINE_CACHE,
    ePipelineLayout           = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
    eRenderPass               = VK_OBJECT_TYPE_RENDER_PASS,
    ePipeline                 = VK_OBJECT_TYPE_PIPELINE,
    eDescriptorSetLayout      = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
    eSampler                  = VK_OBJECT_TYPE_SAMPLER,
    eDescriptorPool           = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
    eDescriptorSet            = VK_OBJECT_TYPE_DESCRIPTOR_SET,
    eFramebuffer              = VK_OBJECT_TYPE_FRAMEBUFFER,
    eCommandPool              = VK_OBJECT_TYPE_COMMAND_POOL,
    eSamplerYcbcrConversion   = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
    eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
    eSurfaceKHR               = VK_OBJECT_TYPE_SURFACE_KHR,
    eSwapchainKHR             = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
    eDisplayKHR               = VK_OBJECT_TYPE_DISPLAY_KHR,
    eDisplayModeKHR           = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
    eDebugReportCallbackEXT   = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoSessionKHR           = VK_OBJECT_TYPE_VIDEO_SESSION_KHR,
    eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eCuModuleNVX                   = VK_OBJECT_TYPE_CU_MODULE_NVX,
    eCuFunctionNVX                 = VK_OBJECT_TYPE_CU_FUNCTION_NVX,
    eDebugUtilsMessengerEXT        = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
    eAccelerationStructureKHR      = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR,
    eValidationCacheEXT            = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
    eAccelerationStructureNV       = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
    ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,
    eDeferredOperationKHR          = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
    eIndirectCommandsLayoutNV      = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
    ePrivateDataSlotEXT            = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
    eDescriptorUpdateTemplateKHR   = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
    eSamplerYcbcrConversionKHR     = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( ObjectType value )
  {
    switch ( value )
    {
      case ObjectType::eUnknown: return "Unknown";
      case ObjectType::eInstance: return "Instance";
      case ObjectType::ePhysicalDevice: return "PhysicalDevice";
      case ObjectType::eDevice: return "Device";
      case ObjectType::eQueue: return "Queue";
      case ObjectType::eSemaphore: return "Semaphore";
      case ObjectType::eCommandBuffer: return "CommandBuffer";
      case ObjectType::eFence: return "Fence";
      case ObjectType::eDeviceMemory: return "DeviceMemory";
      case ObjectType::eBuffer: return "Buffer";
      case ObjectType::eImage: return "Image";
      case ObjectType::eEvent: return "Event";
      case ObjectType::eQueryPool: return "QueryPool";
      case ObjectType::eBufferView: return "BufferView";
      case ObjectType::eImageView: return "ImageView";
      case ObjectType::eShaderModule: return "ShaderModule";
      case ObjectType::ePipelineCache: return "PipelineCache";
      case ObjectType::ePipelineLayout: return "PipelineLayout";
      case ObjectType::eRenderPass: return "RenderPass";
      case ObjectType::ePipeline: return "Pipeline";
      case ObjectType::eDescriptorSetLayout: return "DescriptorSetLayout";
      case ObjectType::eSampler: return "Sampler";
      case ObjectType::eDescriptorPool: return "DescriptorPool";
      case ObjectType::eDescriptorSet: return "DescriptorSet";
      case ObjectType::eFramebuffer: return "Framebuffer";
      case ObjectType::eCommandPool: return "CommandPool";
      case ObjectType::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
      case ObjectType::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
      case ObjectType::eSurfaceKHR: return "SurfaceKHR";
      case ObjectType::eSwapchainKHR: return "SwapchainKHR";
      case ObjectType::eDisplayKHR: return "DisplayKHR";
      case ObjectType::eDisplayModeKHR: return "DisplayModeKHR";
      case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case ObjectType::eVideoSessionKHR: return "VideoSessionKHR";
      case ObjectType::eVideoSessionParametersKHR: return "VideoSessionParametersKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case ObjectType::eCuModuleNVX: return "CuModuleNVX";
      case ObjectType::eCuFunctionNVX: return "CuFunctionNVX";
      case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT";
      case ObjectType::eAccelerationStructureKHR: return "AccelerationStructureKHR";
      case ObjectType::eValidationCacheEXT: return "ValidationCacheEXT";
      case ObjectType::eAccelerationStructureNV: return "AccelerationStructureNV";
      case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL";
      case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR";
      case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV";
      case ObjectType::ePrivateDataSlotEXT: return "PrivateDataSlotEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
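  // Illustrative sketch (not emitted by the registry generator): the raw VkObjectType
  // carried in a VK_EXT_debug_utils callback (VkDebugUtilsObjectNameInfoEXT::objectType)
  // can be turned into readable text via the to_string overload above.
  //
  //   std::string objectTypeName( VkObjectType objectType )
  //   {
  //     return VULKAN_HPP_NAMESPACE::to_string( static_cast<VULKAN_HPP_NAMESPACE::ObjectType>( objectType ) );
  //   }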

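  // VendorId lists the Khronos-assigned vendor IDs used by vendors that do not have a
  // PCI vendor ID; most physical devices report a PCI vendor ID in vendorID instead.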
  enum class VendorId
  {
    eVIV      = VK_VENDOR_ID_VIV,
    eVSI      = VK_VENDOR_ID_VSI,
    eKazan    = VK_VENDOR_ID_KAZAN,
    eCodeplay = VK_VENDOR_ID_CODEPLAY,
    eMESA     = VK_VENDOR_ID_MESA,
    ePocl     = VK_VENDOR_ID_POCL
  };

  VULKAN_HPP_INLINE std::string to_string( VendorId value )
  {
    switch ( value )
    {
      case VendorId::eVIV: return "VIV";
      case VendorId::eVSI: return "VSI";
      case VendorId::eKazan: return "Kazan";
      case VendorId::eCodeplay: return "Codeplay";
      case VendorId::eMESA: return "MESA";
      case VendorId::ePocl: return "Pocl";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

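  // PipelineCacheHeaderVersion describes the version of the header that prefixes the
  // binary blob returned by Device::getPipelineCacheData / vkGetPipelineCacheData.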
  enum class PipelineCacheHeaderVersion
  {
    eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value )
  {
    switch ( value )
    {
      case PipelineCacheHeaderVersion::eOne: return "One";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

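  // Format mirrors VkFormat and names the memory layout of image texels and buffer
  // elements. The suffixes follow the usual Vulkan conventions: Unorm/Snorm are
  // normalized integers, Uscaled/Sscaled are integers converted to float, Uint/Sint
  // are plain integers, Sfloat/Ufloat are floating point, Srgb applies sRGB nonlinear
  // encoding, and PackNN variants store all components in a single NN-bit word.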
  enum class Format
  {
    eUndefined                               = VK_FORMAT_UNDEFINED,
    eR4G4UnormPack8                          = VK_FORMAT_R4G4_UNORM_PACK8,
    eR4G4B4A4UnormPack16                     = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
    eB4G4R4A4UnormPack16                     = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
    eR5G6B5UnormPack16                       = VK_FORMAT_R5G6B5_UNORM_PACK16,
    eB5G6R5UnormPack16                       = VK_FORMAT_B5G6R5_UNORM_PACK16,
    eR5G5B5A1UnormPack16                     = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
    eB5G5R5A1UnormPack16                     = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
    eA1R5G5B5UnormPack16                     = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
    eR8Unorm                                 = VK_FORMAT_R8_UNORM,
    eR8Snorm                                 = VK_FORMAT_R8_SNORM,
    eR8Uscaled                               = VK_FORMAT_R8_USCALED,
    eR8Sscaled                               = VK_FORMAT_R8_SSCALED,
    eR8Uint                                  = VK_FORMAT_R8_UINT,
    eR8Sint                                  = VK_FORMAT_R8_SINT,
    eR8Srgb                                  = VK_FORMAT_R8_SRGB,
    eR8G8Unorm                               = VK_FORMAT_R8G8_UNORM,
    eR8G8Snorm                               = VK_FORMAT_R8G8_SNORM,
    eR8G8Uscaled                             = VK_FORMAT_R8G8_USCALED,
    eR8G8Sscaled                             = VK_FORMAT_R8G8_SSCALED,
    eR8G8Uint                                = VK_FORMAT_R8G8_UINT,
    eR8G8Sint                                = VK_FORMAT_R8G8_SINT,
    eR8G8Srgb                                = VK_FORMAT_R8G8_SRGB,
    eR8G8B8Unorm                             = VK_FORMAT_R8G8B8_UNORM,
    eR8G8B8Snorm                             = VK_FORMAT_R8G8B8_SNORM,
    eR8G8B8Uscaled                           = VK_FORMAT_R8G8B8_USCALED,
    eR8G8B8Sscaled                           = VK_FORMAT_R8G8B8_SSCALED,
    eR8G8B8Uint                              = VK_FORMAT_R8G8B8_UINT,
    eR8G8B8Sint                              = VK_FORMAT_R8G8B8_SINT,
    eR8G8B8Srgb                              = VK_FORMAT_R8G8B8_SRGB,
    eB8G8R8Unorm                             = VK_FORMAT_B8G8R8_UNORM,
    eB8G8R8Snorm                             = VK_FORMAT_B8G8R8_SNORM,
    eB8G8R8Uscaled                           = VK_FORMAT_B8G8R8_USCALED,
    eB8G8R8Sscaled                           = VK_FORMAT_B8G8R8_SSCALED,
    eB8G8R8Uint                              = VK_FORMAT_B8G8R8_UINT,
    eB8G8R8Sint                              = VK_FORMAT_B8G8R8_SINT,
    eB8G8R8Srgb                              = VK_FORMAT_B8G8R8_SRGB,
    eR8G8B8A8Unorm                           = VK_FORMAT_R8G8B8A8_UNORM,
    eR8G8B8A8Snorm                           = VK_FORMAT_R8G8B8A8_SNORM,
    eR8G8B8A8Uscaled                         = VK_FORMAT_R8G8B8A8_USCALED,
    eR8G8B8A8Sscaled                         = VK_FORMAT_R8G8B8A8_SSCALED,
    eR8G8B8A8Uint                            = VK_FORMAT_R8G8B8A8_UINT,
    eR8G8B8A8Sint                            = VK_FORMAT_R8G8B8A8_SINT,
    eR8G8B8A8Srgb                            = VK_FORMAT_R8G8B8A8_SRGB,
    eB8G8R8A8Unorm                           = VK_FORMAT_B8G8R8A8_UNORM,
    eB8G8R8A8Snorm                           = VK_FORMAT_B8G8R8A8_SNORM,
    eB8G8R8A8Uscaled                         = VK_FORMAT_B8G8R8A8_USCALED,
    eB8G8R8A8Sscaled                         = VK_FORMAT_B8G8R8A8_SSCALED,
    eB8G8R8A8Uint                            = VK_FORMAT_B8G8R8A8_UINT,
    eB8G8R8A8Sint                            = VK_FORMAT_B8G8R8A8_SINT,
    eB8G8R8A8Srgb                            = VK_FORMAT_B8G8R8A8_SRGB,
    eA8B8G8R8UnormPack32                     = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
    eA8B8G8R8SnormPack32                     = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
    eA8B8G8R8UscaledPack32                   = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
    eA8B8G8R8SscaledPack32                   = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
    eA8B8G8R8UintPack32                      = VK_FORMAT_A8B8G8R8_UINT_PACK32,
    eA8B8G8R8SintPack32                      = VK_FORMAT_A8B8G8R8_SINT_PACK32,
    eA8B8G8R8SrgbPack32                      = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
    eA2R10G10B10UnormPack32                  = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
    eA2R10G10B10SnormPack32                  = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
    eA2R10G10B10UscaledPack32                = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
    eA2R10G10B10SscaledPack32                = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
    eA2R10G10B10UintPack32                   = VK_FORMAT_A2R10G10B10_UINT_PACK32,
    eA2R10G10B10SintPack32                   = VK_FORMAT_A2R10G10B10_SINT_PACK32,
    eA2B10G10R10UnormPack32                  = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
    eA2B10G10R10SnormPack32                  = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
    eA2B10G10R10UscaledPack32                = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
    eA2B10G10R10SscaledPack32                = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
    eA2B10G10R10UintPack32                   = VK_FORMAT_A2B10G10R10_UINT_PACK32,
    eA2B10G10R10SintPack32                   = VK_FORMAT_A2B10G10R10_SINT_PACK32,
    eR16Unorm                                = VK_FORMAT_R16_UNORM,
    eR16Snorm                                = VK_FORMAT_R16_SNORM,
    eR16Uscaled                              = VK_FORMAT_R16_USCALED,
    eR16Sscaled                              = VK_FORMAT_R16_SSCALED,
    eR16Uint                                 = VK_FORMAT_R16_UINT,
    eR16Sint                                 = VK_FORMAT_R16_SINT,
    eR16Sfloat                               = VK_FORMAT_R16_SFLOAT,
    eR16G16Unorm                             = VK_FORMAT_R16G16_UNORM,
    eR16G16Snorm                             = VK_FORMAT_R16G16_SNORM,
    eR16G16Uscaled                           = VK_FORMAT_R16G16_USCALED,
    eR16G16Sscaled                           = VK_FORMAT_R16G16_SSCALED,
    eR16G16Uint                              = VK_FORMAT_R16G16_UINT,
    eR16G16Sint                              = VK_FORMAT_R16G16_SINT,
    eR16G16Sfloat                            = VK_FORMAT_R16G16_SFLOAT,
    eR16G16B16Unorm                          = VK_FORMAT_R16G16B16_UNORM,
    eR16G16B16Snorm                          = VK_FORMAT_R16G16B16_SNORM,
    eR16G16B16Uscaled                        = VK_FORMAT_R16G16B16_USCALED,
    eR16G16B16Sscaled                        = VK_FORMAT_R16G16B16_SSCALED,
    eR16G16B16Uint                           = VK_FORMAT_R16G16B16_UINT,
    eR16G16B16Sint                           = VK_FORMAT_R16G16B16_SINT,
    eR16G16B16Sfloat                         = VK_FORMAT_R16G16B16_SFLOAT,
    eR16G16B16A16Unorm                       = VK_FORMAT_R16G16B16A16_UNORM,
    eR16G16B16A16Snorm                       = VK_FORMAT_R16G16B16A16_SNORM,
    eR16G16B16A16Uscaled                     = VK_FORMAT_R16G16B16A16_USCALED,
    eR16G16B16A16Sscaled                     = VK_FORMAT_R16G16B16A16_SSCALED,
    eR16G16B16A16Uint                        = VK_FORMAT_R16G16B16A16_UINT,
    eR16G16B16A16Sint                        = VK_FORMAT_R16G16B16A16_SINT,
    eR16G16B16A16Sfloat                      = VK_FORMAT_R16G16B16A16_SFLOAT,
    eR32Uint                                 = VK_FORMAT_R32_UINT,
    eR32Sint                                 = VK_FORMAT_R32_SINT,
    eR32Sfloat                               = VK_FORMAT_R32_SFLOAT,
    eR32G32Uint                              = VK_FORMAT_R32G32_UINT,
    eR32G32Sint                              = VK_FORMAT_R32G32_SINT,
    eR32G32Sfloat                            = VK_FORMAT_R32G32_SFLOAT,
    eR32G32B32Uint                           = VK_FORMAT_R32G32B32_UINT,
    eR32G32B32Sint                           = VK_FORMAT_R32G32B32_SINT,
    eR32G32B32Sfloat                         = VK_FORMAT_R32G32B32_SFLOAT,
    eR32G32B32A32Uint                        = VK_FORMAT_R32G32B32A32_UINT,
    eR32G32B32A32Sint                        = VK_FORMAT_R32G32B32A32_SINT,
    eR32G32B32A32Sfloat                      = VK_FORMAT_R32G32B32A32_SFLOAT,
    eR64Uint                                 = VK_FORMAT_R64_UINT,
    eR64Sint                                 = VK_FORMAT_R64_SINT,
    eR64Sfloat                               = VK_FORMAT_R64_SFLOAT,
    eR64G64Uint                              = VK_FORMAT_R64G64_UINT,
    eR64G64Sint                              = VK_FORMAT_R64G64_SINT,
    eR64G64Sfloat                            = VK_FORMAT_R64G64_SFLOAT,
    eR64G64B64Uint                           = VK_FORMAT_R64G64B64_UINT,
    eR64G64B64Sint                           = VK_FORMAT_R64G64B64_SINT,
    eR64G64B64Sfloat                         = VK_FORMAT_R64G64B64_SFLOAT,
    eR64G64B64A64Uint                        = VK_FORMAT_R64G64B64A64_UINT,
    eR64G64B64A64Sint                        = VK_FORMAT_R64G64B64A64_SINT,
    eR64G64B64A64Sfloat                      = VK_FORMAT_R64G64B64A64_SFLOAT,
    eB10G11R11UfloatPack32                   = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
    eE5B9G9R9UfloatPack32                    = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
    eD16Unorm                                = VK_FORMAT_D16_UNORM,
    eX8D24UnormPack32                        = VK_FORMAT_X8_D24_UNORM_PACK32,
    eD32Sfloat                               = VK_FORMAT_D32_SFLOAT,
    eS8Uint                                  = VK_FORMAT_S8_UINT,
    eD16UnormS8Uint                          = VK_FORMAT_D16_UNORM_S8_UINT,
    eD24UnormS8Uint                          = VK_FORMAT_D24_UNORM_S8_UINT,
    eD32SfloatS8Uint                         = VK_FORMAT_D32_SFLOAT_S8_UINT,
    eBc1RgbUnormBlock                        = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
    eBc1RgbSrgbBlock                         = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
    eBc1RgbaUnormBlock                       = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
    eBc1RgbaSrgbBlock                        = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
    eBc2UnormBlock                           = VK_FORMAT_BC2_UNORM_BLOCK,
    eBc2SrgbBlock                            = VK_FORMAT_BC2_SRGB_BLOCK,
    eBc3UnormBlock                           = VK_FORMAT_BC3_UNORM_BLOCK,
    eBc3SrgbBlock                            = VK_FORMAT_BC3_SRGB_BLOCK,
    eBc4UnormBlock                           = VK_FORMAT_BC4_UNORM_BLOCK,
    eBc4SnormBlock                           = VK_FORMAT_BC4_SNORM_BLOCK,
    eBc5UnormBlock                           = VK_FORMAT_BC5_UNORM_BLOCK,
    eBc5SnormBlock                           = VK_FORMAT_BC5_SNORM_BLOCK,
    eBc6HUfloatBlock                         = VK_FORMAT_BC6H_UFLOAT_BLOCK,
    eBc6HSfloatBlock                         = VK_FORMAT_BC6H_SFLOAT_BLOCK,
    eBc7UnormBlock                           = VK_FORMAT_BC7_UNORM_BLOCK,
    eBc7SrgbBlock                            = VK_FORMAT_BC7_SRGB_BLOCK,
    eEtc2R8G8B8UnormBlock                    = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
    eEtc2R8G8B8SrgbBlock                     = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
    eEtc2R8G8B8A1UnormBlock                  = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
    eEtc2R8G8B8A1SrgbBlock                   = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
    eEtc2R8G8B8A8UnormBlock                  = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
    eEtc2R8G8B8A8SrgbBlock                   = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
    eEacR11UnormBlock                        = VK_FORMAT_EAC_R11_UNORM_BLOCK,
    eEacR11SnormBlock                        = VK_FORMAT_EAC_R11_SNORM_BLOCK,
    eEacR11G11UnormBlock                     = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
    eEacR11G11SnormBlock                     = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
    eAstc4x4UnormBlock                       = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
    eAstc4x4SrgbBlock                        = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
    eAstc5x4UnormBlock                       = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
    eAstc5x4SrgbBlock                        = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
    eAstc5x5UnormBlock                       = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
    eAstc5x5SrgbBlock                        = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
    eAstc6x5UnormBlock                       = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
    eAstc6x5SrgbBlock                        = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
    eAstc6x6UnormBlock                       = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
    eAstc6x6SrgbBlock                        = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
    eAstc8x5UnormBlock                       = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
    eAstc8x5SrgbBlock                        = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
    eAstc8x6UnormBlock                       = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
    eAstc8x6SrgbBlock                        = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
    eAstc8x8UnormBlock                       = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
    eAstc8x8SrgbBlock                        = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
    eAstc10x5UnormBlock                      = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
    eAstc10x5SrgbBlock                       = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
    eAstc10x6UnormBlock                      = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
    eAstc10x6SrgbBlock                       = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
    eAstc10x8UnormBlock                      = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
    eAstc10x8SrgbBlock                       = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
    eAstc10x10UnormBlock                     = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
    eAstc10x10SrgbBlock                      = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
    eAstc12x10UnormBlock                     = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
    eAstc12x10SrgbBlock                      = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
    eAstc12x12UnormBlock                     = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
    eAstc12x12SrgbBlock                      = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
    eG8B8G8R8422Unorm                        = VK_FORMAT_G8B8G8R8_422_UNORM,
    eB8G8R8G8422Unorm                        = VK_FORMAT_B8G8R8G8_422_UNORM,
    eG8B8R83Plane420Unorm                    = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
    eG8B8R82Plane420Unorm                    = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
    eG8B8R83Plane422Unorm                    = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
    eG8B8R82Plane422Unorm                    = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
    eG8B8R83Plane444Unorm                    = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
    eR10X6UnormPack16                        = VK_FORMAT_R10X6_UNORM_PACK16,
    eR10X6G10X6Unorm2Pack16                  = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
    eR10X6G10X6B10X6A10X6Unorm4Pack16        = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
    eG10X6B10X6G10X6R10X6422Unorm4Pack16     = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
    eB10X6G10X6R10X6G10X6422Unorm4Pack16     = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
    eG10X6B10X6R10X63Plane420Unorm3Pack16    = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
    eG10X6B10X6R10X62Plane420Unorm3Pack16    = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
    eG10X6B10X6R10X63Plane422Unorm3Pack16    = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
    eG10X6B10X6R10X62Plane422Unorm3Pack16    = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
    eG10X6B10X6R10X63Plane444Unorm3Pack16    = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
    eR12X4UnormPack16                        = VK_FORMAT_R12X4_UNORM_PACK16,
    eR12X4G12X4Unorm2Pack16                  = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
    eR12X4G12X4B12X4A12X4Unorm4Pack16        = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
    eG12X4B12X4G12X4R12X4422Unorm4Pack16     = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
    eB12X4G12X4R12X4G12X4422Unorm4Pack16     = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
    eG12X4B12X4R12X43Plane420Unorm3Pack16    = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
    eG12X4B12X4R12X42Plane420Unorm3Pack16    = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
    eG12X4B12X4R12X43Plane422Unorm3Pack16    = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
    eG12X4B12X4R12X42Plane422Unorm3Pack16    = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
    eG12X4B12X4R12X43Plane444Unorm3Pack16    = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
    eG16B16G16R16422Unorm                    = VK_FORMAT_G16B16G16R16_422_UNORM,
    eB16G16R16G16422Unorm                    = VK_FORMAT_B16G16R16G16_422_UNORM,
    eG16B16R163Plane420Unorm                 = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
    eG16B16R162Plane420Unorm                 = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
    eG16B16R163Plane422Unorm                 = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
    eG16B16R162Plane422Unorm                 = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
    eG16B16R163Plane444Unorm                 = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
    ePvrtc12BppUnormBlockIMG                 = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
    ePvrtc14BppUnormBlockIMG                 = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
    ePvrtc22BppUnormBlockIMG                 = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
    ePvrtc24BppUnormBlockIMG                 = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
    ePvrtc12BppSrgbBlockIMG                  = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
    ePvrtc14BppSrgbBlockIMG                  = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
    ePvrtc22BppSrgbBlockIMG                  = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
    ePvrtc24BppSrgbBlockIMG                  = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
    eAstc4x4SfloatBlockEXT                   = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,
    eAstc5x4SfloatBlockEXT                   = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,
    eAstc5x5SfloatBlockEXT                   = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,
    eAstc6x5SfloatBlockEXT                   = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,
    eAstc6x6SfloatBlockEXT                   = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,
    eAstc8x5SfloatBlockEXT                   = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,
    eAstc8x6SfloatBlockEXT                   = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,
    eAstc8x8SfloatBlockEXT                   = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,
    eAstc10x5SfloatBlockEXT                  = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,
    eAstc10x6SfloatBlockEXT                  = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,
    eAstc10x8SfloatBlockEXT                  = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,
    eAstc10x10SfloatBlockEXT                 = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
    eAstc12x10SfloatBlockEXT                 = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
    eAstc12x12SfloatBlockEXT                 = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
    eG8B8R82Plane444UnormEXT                 = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT,
    eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT,
    eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT,
    eG16B16R162Plane444UnormEXT              = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT,
    eA4R4G4B4UnormPack16EXT                  = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
    eA4B4G4R4UnormPack16EXT                  = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
    eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR  = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
    eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR  = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
    eB16G16R16G16422UnormKHR                 = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
    eB8G8R8G8422UnormKHR                     = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
    eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR  = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
    eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
    eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
    eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
    eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
    eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
    eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR  = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
    eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
    eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
    eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
    eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
    eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
    eG16B16G16R16422UnormKHR                 = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
    eG16B16R162Plane420UnormKHR              = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
    eG16B16R162Plane422UnormKHR              = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
    eG16B16R163Plane420UnormKHR              = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
    eG16B16R163Plane422UnormKHR              = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
    eG16B16R163Plane444UnormKHR              = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
    eG8B8G8R8422UnormKHR                     = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
    eG8B8R82Plane420UnormKHR                 = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
    eG8B8R82Plane422UnormKHR                 = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
    eG8B8R83Plane420UnormKHR                 = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
    eG8B8R83Plane422UnormKHR                 = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
    eG8B8R83Plane444UnormKHR                 = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
    eR10X6G10X6B10X6A10X6Unorm4Pack16KHR     = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
    eR10X6G10X6Unorm2Pack16KHR               = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
    eR10X6UnormPack16KHR                     = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
    eR12X4G12X4B12X4A12X4Unorm4Pack16KHR     = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
    eR12X4G12X4Unorm2Pack16KHR               = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
    eR12X4UnormPack16KHR                     = VK_FORMAT_R12X4_UNORM_PACK16_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( Format value )
  {
    switch ( value )
    {
      case Format::eUndefined: return "Undefined";
      case Format::eR4G4UnormPack8: return "R4G4UnormPack8";
      case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16";
      case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16";
      case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16";
      case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16";
      case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16";
      case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16";
      case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16";
      case Format::eR8Unorm: return "R8Unorm";
      case Format::eR8Snorm: return "R8Snorm";
      case Format::eR8Uscaled: return "R8Uscaled";
      case Format::eR8Sscaled: return "R8Sscaled";
      case Format::eR8Uint: return "R8Uint";
      case Format::eR8Sint: return "R8Sint";
      case Format::eR8Srgb: return "R8Srgb";
      case Format::eR8G8Unorm: return "R8G8Unorm";
      case Format::eR8G8Snorm: return "R8G8Snorm";
      case Format::eR8G8Uscaled: return "R8G8Uscaled";
      case Format::eR8G8Sscaled: return "R8G8Sscaled";
      case Format::eR8G8Uint: return "R8G8Uint";
      case Format::eR8G8Sint: return "R8G8Sint";
      case Format::eR8G8Srgb: return "R8G8Srgb";
      case Format::eR8G8B8Unorm: return "R8G8B8Unorm";
      case Format::eR8G8B8Snorm: return "R8G8B8Snorm";
      case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled";
      case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled";
      case Format::eR8G8B8Uint: return "R8G8B8Uint";
      case Format::eR8G8B8Sint: return "R8G8B8Sint";
      case Format::eR8G8B8Srgb: return "R8G8B8Srgb";
      case Format::eB8G8R8Unorm: return "B8G8R8Unorm";
      case Format::eB8G8R8Snorm: return "B8G8R8Snorm";
      case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled";
      case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled";
      case Format::eB8G8R8Uint: return "B8G8R8Uint";
      case Format::eB8G8R8Sint: return "B8G8R8Sint";
      case Format::eB8G8R8Srgb: return "B8G8R8Srgb";
      case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm";
      case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm";
      case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled";
      case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled";
      case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint";
      case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint";
      case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb";
      case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm";
      case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm";
      case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled";
      case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled";
      case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint";
      case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint";
      case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb";
      case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32";
      case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32";
      case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32";
      case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32";
      case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32";
      case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32";
      case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32";
      case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32";
      case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32";
      case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32";
      case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32";
      case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32";
      case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32";
      case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32";
      case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32";
      case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32";
      case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32";
      case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32";
      case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32";
      case Format::eR16Unorm: return "R16Unorm";
      case Format::eR16Snorm: return "R16Snorm";
      case Format::eR16Uscaled: return "R16Uscaled";
      case Format::eR16Sscaled: return "R16Sscaled";
      case Format::eR16Uint: return "R16Uint";
      case Format::eR16Sint: return "R16Sint";
      case Format::eR16Sfloat: return "R16Sfloat";
      case Format::eR16G16Unorm: return "R16G16Unorm";
      case Format::eR16G16Snorm: return "R16G16Snorm";
      case Format::eR16G16Uscaled: return "R16G16Uscaled";
      case Format::eR16G16Sscaled: return "R16G16Sscaled";
      case Format::eR16G16Uint: return "R16G16Uint";
      case Format::eR16G16Sint: return "R16G16Sint";
      case Format::eR16G16Sfloat: return "R16G16Sfloat";
      case Format::eR16G16B16Unorm: return "R16G16B16Unorm";
      case Format::eR16G16B16Snorm: return "R16G16B16Snorm";
      case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled";
      case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled";
      case Format::eR16G16B16Uint: return "R16G16B16Uint";
      case Format::eR16G16B16Sint: return "R16G16B16Sint";
      case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat";
      case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm";
      case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm";
      case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled";
      case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled";
      case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint";
      case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint";
      case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat";
      case Format::eR32Uint: return "R32Uint";
      case Format::eR32Sint: return "R32Sint";
      case Format::eR32Sfloat: return "R32Sfloat";
      case Format::eR32G32Uint: return "R32G32Uint";
      case Format::eR32G32Sint: return "R32G32Sint";
      case Format::eR32G32Sfloat: return "R32G32Sfloat";
      case Format::eR32G32B32Uint: return "R32G32B32Uint";
      case Format::eR32G32B32Sint: return "R32G32B32Sint";
      case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat";
      case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint";
      case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint";
      case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat";
      case Format::eR64Uint: return "R64Uint";
      case Format::eR64Sint: return "R64Sint";
      case Format::eR64Sfloat: return "R64Sfloat";
      case Format::eR64G64Uint: return "R64G64Uint";
      case Format::eR64G64Sint: return "R64G64Sint";
      case Format::eR64G64Sfloat: return "R64G64Sfloat";
      case Format::eR64G64B64Uint: return "R64G64B64Uint";
      case Format::eR64G64B64Sint: return "R64G64B64Sint";
      case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat";
      case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint";
      case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint";
      case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat";
      case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32";
      case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32";
      case Format::eD16Unorm: return "D16Unorm";
      case Format::eX8D24UnormPack32: return "X8D24UnormPack32";
      case Format::eD32Sfloat: return "D32Sfloat";
      case Format::eS8Uint: return "S8Uint";
      case Format::eD16UnormS8Uint: return "D16UnormS8Uint";
      case Format::eD24UnormS8Uint: return "D24UnormS8Uint";
      case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint";
      case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock";
      case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock";
      case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock";
      case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock";
      case Format::eBc2UnormBlock: return "Bc2UnormBlock";
      case Format::eBc2SrgbBlock: return "Bc2SrgbBlock";
      case Format::eBc3UnormBlock: return "Bc3UnormBlock";
      case Format::eBc3SrgbBlock: return "Bc3SrgbBlock";
      case Format::eBc4UnormBlock: return "Bc4UnormBlock";
      case Format::eBc4SnormBlock: return "Bc4SnormBlock";
      case Format::eBc5UnormBlock: return "Bc5UnormBlock";
      case Format::eBc5SnormBlock: return "Bc5SnormBlock";
      case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock";
      case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock";
      case Format::eBc7UnormBlock: return "Bc7UnormBlock";
      case Format::eBc7SrgbBlock: return "Bc7SrgbBlock";
      case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock";
      case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock";
      case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock";
      case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock";
      case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock";
      case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock";
      case Format::eEacR11UnormBlock: return "EacR11UnormBlock";
      case Format::eEacR11SnormBlock: return "EacR11SnormBlock";
      case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock";
      case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock";
      case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock";
      case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock";
      case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock";
      case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock";
      case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock";
      case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock";
      case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock";
      case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock";
      case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock";
      case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock";
      case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock";
      case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock";
      case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock";
      case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock";
      case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock";
      case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock";
      case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock";
      case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock";
      case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock";
      case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock";
      case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock";
      case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock";
      case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock";
      case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock";
      case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock";
      case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock";
      case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock";
      case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock";
      case Format::eG8B8G8R8422Unorm: return "G8B8G8R8422Unorm";
      case Format::eB8G8R8G8422Unorm: return "B8G8R8G8422Unorm";
      case Format::eG8B8R83Plane420Unorm: return "G8B8R83Plane420Unorm";
      case Format::eG8B8R82Plane420Unorm: return "G8B8R82Plane420Unorm";
      case Format::eG8B8R83Plane422Unorm: return "G8B8R83Plane422Unorm";
      case Format::eG8B8R82Plane422Unorm: return "G8B8R82Plane422Unorm";
      case Format::eG8B8R83Plane444Unorm: return "G8B8R83Plane444Unorm";
      case Format::eR10X6UnormPack16: return "R10X6UnormPack16";
      case Format::eR10X6G10X6Unorm2Pack16: return "R10X6G10X6Unorm2Pack16";
      case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "R10X6G10X6B10X6A10X6Unorm4Pack16";
      case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "G10X6B10X6G10X6R10X6422Unorm4Pack16";
      case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "B10X6G10X6R10X6G10X6422Unorm4Pack16";
      case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "G10X6B10X6R10X63Plane420Unorm3Pack16";
      case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "G10X6B10X6R10X62Plane420Unorm3Pack16";
      case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "G10X6B10X6R10X63Plane422Unorm3Pack16";
      case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "G10X6B10X6R10X62Plane422Unorm3Pack16";
      case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "G10X6B10X6R10X63Plane444Unorm3Pack16";
      case Format::eR12X4UnormPack16: return "R12X4UnormPack16";
      case Format::eR12X4G12X4Unorm2Pack16: return "R12X4G12X4Unorm2Pack16";
      case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "R12X4G12X4B12X4A12X4Unorm4Pack16";
      case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "G12X4B12X4G12X4R12X4422Unorm4Pack16";
      case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "B12X4G12X4R12X4G12X4422Unorm4Pack16";
      case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "G12X4B12X4R12X43Plane420Unorm3Pack16";
      case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "G12X4B12X4R12X42Plane420Unorm3Pack16";
      case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "G12X4B12X4R12X43Plane422Unorm3Pack16";
      case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "G12X4B12X4R12X42Plane422Unorm3Pack16";
      case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "G12X4B12X4R12X43Plane444Unorm3Pack16";
      case Format::eG16B16G16R16422Unorm: return "G16B16G16R16422Unorm";
      case Format::eB16G16R16G16422Unorm: return "B16G16R16G16422Unorm";
      case Format::eG16B16R163Plane420Unorm: return "G16B16R163Plane420Unorm";
      case Format::eG16B16R162Plane420Unorm: return "G16B16R162Plane420Unorm";
      case Format::eG16B16R163Plane422Unorm: return "G16B16R163Plane422Unorm";
      case Format::eG16B16R162Plane422Unorm: return "G16B16R162Plane422Unorm";
      case Format::eG16B16R163Plane444Unorm: return "G16B16R163Plane444Unorm";
      case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
      case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
      case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
      case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
      case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
      case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
      case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
      case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
      case Format::eAstc4x4SfloatBlockEXT: return "Astc4x4SfloatBlockEXT";
      case Format::eAstc5x4SfloatBlockEXT: return "Astc5x4SfloatBlockEXT";
      case Format::eAstc5x5SfloatBlockEXT: return "Astc5x5SfloatBlockEXT";
      case Format::eAstc6x5SfloatBlockEXT: return "Astc6x5SfloatBlockEXT";
      case Format::eAstc6x6SfloatBlockEXT: return "Astc6x6SfloatBlockEXT";
      case Format::eAstc8x5SfloatBlockEXT: return "Astc8x5SfloatBlockEXT";
      case Format::eAstc8x6SfloatBlockEXT: return "Astc8x6SfloatBlockEXT";
      case Format::eAstc8x8SfloatBlockEXT: return "Astc8x8SfloatBlockEXT";
      case Format::eAstc10x5SfloatBlockEXT: return "Astc10x5SfloatBlockEXT";
      case Format::eAstc10x6SfloatBlockEXT: return "Astc10x6SfloatBlockEXT";
      case Format::eAstc10x8SfloatBlockEXT: return "Astc10x8SfloatBlockEXT";
      case Format::eAstc10x10SfloatBlockEXT: return "Astc10x10SfloatBlockEXT";
      case Format::eAstc12x10SfloatBlockEXT: return "Astc12x10SfloatBlockEXT";
      case Format::eAstc12x12SfloatBlockEXT: return "Astc12x12SfloatBlockEXT";
      case Format::eG8B8R82Plane444UnormEXT: return "G8B8R82Plane444UnormEXT";
      case Format::eG10X6B10X6R10X62Plane444Unorm3Pack16EXT: return "G10X6B10X6R10X62Plane444Unorm3Pack16EXT";
      case Format::eG12X4B12X4R12X42Plane444Unorm3Pack16EXT: return "G12X4B12X4R12X42Plane444Unorm3Pack16EXT";
      case Format::eG16B16R162Plane444UnormEXT: return "G16B16R162Plane444UnormEXT";
      case Format::eA4R4G4B4UnormPack16EXT: return "A4R4G4B4UnormPack16EXT";
      case Format::eA4B4G4R4UnormPack16EXT: return "A4B4G4R4UnormPack16EXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
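  // Illustrative sketch (not emitted by the registry generator): logging a chosen
  // surface format in readable form; pickSurfaceFormat() is a hypothetical helper.
  //
  //   VULKAN_HPP_NAMESPACE::SurfaceFormatKHR chosen = pickSurfaceFormat();
  //   std::cout << "swapchain format: " << VULKAN_HPP_NAMESPACE::to_string( chosen.format ) << "\n";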

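  // FormatFeatureFlagBits mirrors VkFormatFeatureFlagBits: the per-format capabilities
  // reported in FormatProperties for linear tiling, optimal tiling, and buffers.
  // A typical query might look like this (illustrative, assuming a valid PhysicalDevice):
  //
  //   VULKAN_HPP_NAMESPACE::FormatProperties props =
  //     physicalDevice.getFormatProperties( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb );
  //   bool usableAsColorAttachment =
  //     static_cast<bool>( props.optimalTilingFeatures & VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits::eColorAttachment );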
  enum class FormatFeatureFlagBits : VkFormatFeatureFlags
  {
    eSampledImage                            = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
    eStorageImage                            = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
    eStorageImageAtomic                      = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
    eUniformTexelBuffer                      = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
    eStorageTexelBuffer                      = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
    eStorageTexelBufferAtomic                = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
    eVertexBuffer                            = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
    eColorAttachment                         = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
    eColorAttachmentBlend                    = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
    eDepthStencilAttachment                  = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
    eBlitSrc                                 = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
    eBlitDst                                 = VK_FORMAT_FEATURE_BLIT_DST_BIT,
    eSampledImageFilterLinear                = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
    eTransferSrc                             = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
    eTransferDst                             = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
    eMidpointChromaSamples                   = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
    eSampledImageYcbcrConversionSeparateReconstructionFilter =
      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
    eSampledImageYcbcrConversionChromaReconstructionExplicit =
      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable =
      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
    eDisjoint                   = VK_FORMAT_FEATURE_DISJOINT_BIT,
    eCositedChromaSamples       = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
    eSampledImageFilterMinmax   = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
    eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR,
    eVideoDecodeDpbKHR    = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
    eFragmentDensityMapEXT                = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
    eFragmentShadingRateAttachmentKHR     = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR,
    eVideoEncodeDpbKHR   = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eCositedChromaSamplesKHR     = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
    eDisjointKHR                 = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
    eMidpointChromaSamplesKHR    = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
    eSampledImageFilterCubicEXT  = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
    eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
    eSampledImageYcbcrConversionChromaReconstructionExplicitKHR =
      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
    eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR =
      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
    eSampledImageYcbcrConversionLinearFilterKHR =
      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
    eSampledImageYcbcrConversionSeparateReconstructionFilterKHR =
      VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
    eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
    eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
  {
    switch ( value )
    {
      case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
      case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
      case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
      case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
      case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
      case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
      case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
      case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
      case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
      case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
      case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
      case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
      case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
      case FormatFeatureFlagBits::eTransferSrc: return "TransferSrc";
      case FormatFeatureFlagBits::eTransferDst: return "TransferDst";
      case FormatFeatureFlagBits::eMidpointChromaSamples: return "MidpointChromaSamples";
      case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter:
        return "SampledImageYcbcrConversionLinearFilter";
      case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter:
        return "SampledImageYcbcrConversionSeparateReconstructionFilter";
      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit:
        return "SampledImageYcbcrConversionChromaReconstructionExplicit";
      case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable:
        return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
      case FormatFeatureFlagBits::eDisjoint: return "Disjoint";
      case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples";
      case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax";
      case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR";
      case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR";
      case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
      case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR";
      case FormatFeatureFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

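  // ImageCreateFlagBits mirrors VkImageCreateFlagBits and is supplied in
  // ImageCreateInfo::flags to request sparse storage, mutable-format views,
  // cube-compatible array layers, and similar creation-time properties.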
  enum class ImageCreateFlagBits : VkImageCreateFlags
  {
    eSparseBinding                     = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
    eSparseResidency                   = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
    eSparseAliased                     = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
    eMutableFormat                     = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
    eCubeCompatible                    = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
    eAlias                             = VK_IMAGE_CREATE_ALIAS_BIT,
    eSplitInstanceBindRegions          = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
    e2DArrayCompatible                 = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
    eBlockTexelViewCompatible          = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
    eExtendedUsage                     = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
    eProtected                         = VK_IMAGE_CREATE_PROTECTED_BIT,
    eDisjoint                          = VK_IMAGE_CREATE_DISJOINT_BIT,
    eCornerSampledNV                   = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
    eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
    eSubsampledEXT                     = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
    e2DArrayCompatibleKHR              = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
    eAliasKHR                          = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
    eBlockTexelViewCompatibleKHR       = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
    eDisjointKHR                       = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
    eExtendedUsageKHR                  = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
    eSplitInstanceBindRegionsKHR       = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
  {
    switch ( value )
    {
      case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
      case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
      case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
      case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
      case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
      case ImageCreateFlagBits::eAlias: return "Alias";
      case ImageCreateFlagBits::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
      case ImageCreateFlagBits::e2DArrayCompatible: return "2DArrayCompatible";
      case ImageCreateFlagBits::eBlockTexelViewCompatible: return "BlockTexelViewCompatible";
      case ImageCreateFlagBits::eExtendedUsage: return "ExtendedUsage";
      case ImageCreateFlagBits::eProtected: return "Protected";
      case ImageCreateFlagBits::eDisjoint: return "Disjoint";
      case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV";
      case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT";
      case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
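
  // Illustrative sketch (not part of the generated header): individual ImageCreateFlagBits
  // values combine into an ImageCreateFlags bitmask via operator|, e.g. for a cube-compatible
  // image whose format may later be reinterpreted. Assumes the default `vk` namespace and the
  // ImageCreateInfo struct defined further below in this header.
  //
  //   vk::ImageCreateInfo info{};
  //   info.flags = vk::ImageCreateFlagBits::eCubeCompatible |
  //                vk::ImageCreateFlagBits::eMutableFormat;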

  enum class ImageTiling
  {
    eOptimal              = VK_IMAGE_TILING_OPTIMAL,
    eLinear               = VK_IMAGE_TILING_LINEAR,
    eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ImageTiling value )
  {
    switch ( value )
    {
      case ImageTiling::eOptimal: return "Optimal";
      case ImageTiling::eLinear: return "Linear";
      case ImageTiling::eDrmFormatModifierEXT: return "DrmFormatModifierEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ImageType
  {
    e1D = VK_IMAGE_TYPE_1D,
    e2D = VK_IMAGE_TYPE_2D,
    e3D = VK_IMAGE_TYPE_3D
  };

  VULKAN_HPP_INLINE std::string to_string( ImageType value )
  {
    switch ( value )
    {
      case ImageType::e1D: return "1D";
      case ImageType::e2D: return "2D";
      case ImageType::e3D: return "3D";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ImageUsageFlagBits : VkImageUsageFlags
  {
    eTransferSrc            = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
    eTransferDst            = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
    eSampled                = VK_IMAGE_USAGE_SAMPLED_BIT,
    eStorage                = VK_IMAGE_USAGE_STORAGE_BIT,
    eColorAttachment        = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
    eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
    eTransientAttachment    = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
    eInputAttachment        = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR,
    eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
    eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eFragmentDensityMapEXT            = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
    eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
    eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
    eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
  {
    switch ( value )
    {
      case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
      case ImageUsageFlagBits::eTransferDst: return "TransferDst";
      case ImageUsageFlagBits::eSampled: return "Sampled";
      case ImageUsageFlagBits::eStorage: return "Storage";
      case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
      case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
      case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
      case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
      case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
      case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT";
      case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
      case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
      case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
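
  // Illustrative sketch (not part of the generated header): a typical usage mask for a texture
  // that is written via a transfer and then sampled in shaders. Assumes the default `vk` namespace.
  //
  //   vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eTransferDst |
  //                               vk::ImageUsageFlagBits::eSampled;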

  enum class InstanceCreateFlagBits
  {
  };

  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits )
  {
    return "(void)";
  }

  enum class InternalAllocationType
  {
    eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
  };

  VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value )
  {
    switch ( value )
    {
      case InternalAllocationType::eExecutable: return "Executable";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class MemoryHeapFlagBits : VkMemoryHeapFlags
  {
    eDeviceLocal      = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
    eMultiInstance    = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
    eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
  {
    switch ( value )
    {
      case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
      case MemoryHeapFlagBits::eMultiInstance: return "MultiInstance";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags
  {
    eDeviceLocal       = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
    eHostVisible       = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
    eHostCoherent      = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
    eHostCached        = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
    eLazilyAllocated   = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
    eProtected         = VK_MEMORY_PROPERTY_PROTECTED_BIT,
    eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
    eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD
  };

  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
  {
    switch ( value )
    {
      case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
      case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
      case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
      case MemoryPropertyFlagBits::eHostCached: return "HostCached";
      case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
      case MemoryPropertyFlagBits::eProtected: return "Protected";
      case MemoryPropertyFlagBits::eDeviceCoherentAMD: return "DeviceCoherentAMD";
      case MemoryPropertyFlagBits::eDeviceUncachedAMD: return "DeviceUncachedAMD";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
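
  // Illustrative sketch (not part of the generated header): staging buffers are usually placed in
  // a memory type that is both host-visible and host-coherent, so mapped writes need no explicit
  // flush. Assumes the default `vk` namespace; the mask would be compared against
  // VkPhysicalDeviceMemoryProperties::memoryTypes[i].propertyFlags.
  //
  //   vk::MemoryPropertyFlags wanted = vk::MemoryPropertyFlagBits::eHostVisible |
  //                                    vk::MemoryPropertyFlagBits::eHostCoherent;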

  enum class PhysicalDeviceType
  {
    eOther         = VK_PHYSICAL_DEVICE_TYPE_OTHER,
    eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
    eDiscreteGpu   = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
    eVirtualGpu    = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
    eCpu           = VK_PHYSICAL_DEVICE_TYPE_CPU
  };

  VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )
  {
    switch ( value )
    {
      case PhysicalDeviceType::eOther: return "Other";
      case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
      case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
      case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
      case PhysicalDeviceType::eCpu: return "Cpu";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class QueueFlagBits : VkQueueFlags
  {
    eGraphics      = VK_QUEUE_GRAPHICS_BIT,
    eCompute       = VK_QUEUE_COMPUTE_BIT,
    eTransfer      = VK_QUEUE_TRANSFER_BIT,
    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
    eProtected     = VK_QUEUE_PROTECTED_BIT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
    eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
  };

  VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
  {
    switch ( value )
    {
      case QueueFlagBits::eGraphics: return "Graphics";
      case QueueFlagBits::eCompute: return "Compute";
      case QueueFlagBits::eTransfer: return "Transfer";
      case QueueFlagBits::eSparseBinding: return "SparseBinding";
      case QueueFlagBits::eProtected: return "Protected";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR";
      case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
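
  // Illustrative sketch (not part of the generated header): testing whether a queue family
  // supports graphics work, assuming `props` is a vk::QueueFamilyProperties obtained from a
  // physical device (handle types are declared later in this header).
  //
  //   bool hasGraphics = static_cast<bool>( props.queueFlags & vk::QueueFlagBits::eGraphics );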

  enum class SampleCountFlagBits : VkSampleCountFlags
  {
    e1  = VK_SAMPLE_COUNT_1_BIT,
    e2  = VK_SAMPLE_COUNT_2_BIT,
    e4  = VK_SAMPLE_COUNT_4_BIT,
    e8  = VK_SAMPLE_COUNT_8_BIT,
    e16 = VK_SAMPLE_COUNT_16_BIT,
    e32 = VK_SAMPLE_COUNT_32_BIT,
    e64 = VK_SAMPLE_COUNT_64_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
  {
    switch ( value )
    {
      case SampleCountFlagBits::e1: return "1";
      case SampleCountFlagBits::e2: return "2";
      case SampleCountFlagBits::e4: return "4";
      case SampleCountFlagBits::e8: return "8";
      case SampleCountFlagBits::e16: return "16";
      case SampleCountFlagBits::e32: return "32";
      case SampleCountFlagBits::e64: return "64";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
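
  // Illustrative sketch (not part of the generated header): supported MSAA counts are reported as
  // a SampleCountFlags mask (e.g. framebufferColorSampleCounts in vk::PhysicalDeviceLimits), so a
  // requested count is checked with a bitwise test. Assumes the default `vk` namespace.
  //
  //   bool supports8x = static_cast<bool>( limits.framebufferColorSampleCounts &
  //                                        vk::SampleCountFlagBits::e8 );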

  enum class SystemAllocationScope
  {
    eCommand  = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
    eObject   = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
    eCache    = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
    eDevice   = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
    eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
  };

  VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value )
  {
    switch ( value )
    {
      case SystemAllocationScope::eCommand: return "Command";
      case SystemAllocationScope::eObject: return "Object";
      case SystemAllocationScope::eCache: return "Cache";
      case SystemAllocationScope::eDevice: return "Device";
      case SystemAllocationScope::eInstance: return "Instance";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DeviceCreateFlagBits
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
  {
    return "(void)";
  }

  enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags
  {
    eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
  {
    switch ( value )
    {
      case DeviceQueueCreateFlagBits::eProtected: return "Protected";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PipelineStageFlagBits : VkPipelineStageFlags
  {
    eTopOfPipe                        = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
    eDrawIndirect                     = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
    eVertexInput                      = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
    eVertexShader                     = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
    eTessellationControlShader        = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
    eTessellationEvaluationShader     = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
    eGeometryShader                   = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
    eFragmentShader                   = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
    eEarlyFragmentTests               = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
    eLateFragmentTests                = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
    eColorAttachmentOutput            = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
    eComputeShader                    = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
    eTransfer                         = VK_PIPELINE_STAGE_TRANSFER_BIT,
    eBottomOfPipe                     = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
    eHost                             = VK_PIPELINE_STAGE_HOST_BIT,
    eAllGraphics                      = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
    eAllCommands                      = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
    eTransformFeedbackEXT             = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
    eConditionalRenderingEXT          = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
    eAccelerationStructureBuildKHR    = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
    eRayTracingShaderKHR              = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
    eTaskShaderNV                     = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
    eMeshShaderNV                     = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
    eFragmentDensityProcessEXT        = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
    eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
    eCommandPreprocessNV              = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
    eNoneKHR                          = VK_PIPELINE_STAGE_NONE_KHR,
    eAccelerationStructureBuildNV     = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
    eRayTracingShaderNV               = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
    eShadingRateImageNV               = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
  {
    switch ( value )
    {
      case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
      case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
      case PipelineStageFlagBits::eVertexInput: return "VertexInput";
      case PipelineStageFlagBits::eVertexShader: return "VertexShader";
      case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
      case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
      case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
      case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
      case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
      case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
      case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
      case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
      case PipelineStageFlagBits::eTransfer: return "Transfer";
      case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
      case PipelineStageFlagBits::eHost: return "Host";
      case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
      case PipelineStageFlagBits::eAllCommands: return "AllCommands";
      case PipelineStageFlagBits::eTransformFeedbackEXT: return "TransformFeedbackEXT";
      case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
      case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR";
      case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR";
      case PipelineStageFlagBits::eTaskShaderNV: return "TaskShaderNV";
      case PipelineStageFlagBits::eMeshShaderNV: return "MeshShaderNV";
      case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
      case PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR";
      case PipelineStageFlagBits::eCommandPreprocessNV: return "CommandPreprocessNV";
      case PipelineStageFlagBits::eNoneKHR: return "NoneKHR";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
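
  // Illustrative sketch (not part of the generated header): source and destination stage masks
  // for a common upload barrier, as passed to CommandBuffer::pipelineBarrier (declared later in
  // this header). Assumes the default `vk` namespace.
  //
  //   vk::PipelineStageFlags srcStage = vk::PipelineStageFlagBits::eTransfer;
  //   vk::PipelineStageFlags dstStage = vk::PipelineStageFlagBits::eFragmentShader;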

  enum class ImageAspectFlagBits : VkImageAspectFlags
  {
    eColor           = VK_IMAGE_ASPECT_COLOR_BIT,
    eDepth           = VK_IMAGE_ASPECT_DEPTH_BIT,
    eStencil         = VK_IMAGE_ASPECT_STENCIL_BIT,
    eMetadata        = VK_IMAGE_ASPECT_METADATA_BIT,
    ePlane0          = VK_IMAGE_ASPECT_PLANE_0_BIT,
    ePlane1          = VK_IMAGE_ASPECT_PLANE_1_BIT,
    ePlane2          = VK_IMAGE_ASPECT_PLANE_2_BIT,
    eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
    eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
    eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
    eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
    ePlane0KHR       = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
    ePlane1KHR       = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
    ePlane2KHR       = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
  {
    switch ( value )
    {
      case ImageAspectFlagBits::eColor: return "Color";
      case ImageAspectFlagBits::eDepth: return "Depth";
      case ImageAspectFlagBits::eStencil: return "Stencil";
      case ImageAspectFlagBits::eMetadata: return "Metadata";
      case ImageAspectFlagBits::ePlane0: return "Plane0";
      case ImageAspectFlagBits::ePlane1: return "Plane1";
      case ImageAspectFlagBits::ePlane2: return "Plane2";
      case ImageAspectFlagBits::eMemoryPlane0EXT: return "MemoryPlane0EXT";
      case ImageAspectFlagBits::eMemoryPlane1EXT: return "MemoryPlane1EXT";
      case ImageAspectFlagBits::eMemoryPlane2EXT: return "MemoryPlane2EXT";
      case ImageAspectFlagBits::eMemoryPlane3EXT: return "MemoryPlane3EXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
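
  // Illustrative sketch (not part of the generated header): a combined depth/stencil image is
  // addressed through both aspects in a subresource range. Assumes the default `vk` namespace.
  //
  //   vk::ImageAspectFlags aspects = vk::ImageAspectFlagBits::eDepth |
  //                                  vk::ImageAspectFlagBits::eStencil;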

  enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
  {
    eSingleMiptail        = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
    eAlignedMipSize       = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
    eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
  {
    switch ( value )
    {
      case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
      case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
      case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags
  {
    eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
  {
    switch ( value )
    {
      case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class FenceCreateFlagBits : VkFenceCreateFlags
  {
    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
  {
    switch ( value )
    {
      case FenceCreateFlagBits::eSignaled: return "Signaled";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class EventCreateFlagBits : VkEventCreateFlags
  {
    eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits value )
  {
    switch ( value )
    {
      case EventCreateFlagBits::eDeviceOnlyKHR: return "DeviceOnlyKHR";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags
  {
    eInputAssemblyVertices            = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
    eInputAssemblyPrimitives          = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
    eVertexShaderInvocations          = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
    eGeometryShaderInvocations        = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
    eGeometryShaderPrimitives         = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
    eClippingInvocations              = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
    eClippingPrimitives               = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
    eFragmentShaderInvocations        = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
    eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
    eTessellationEvaluationShaderInvocations =
      VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
    eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
  {
    switch ( value )
    {
      case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
      case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
      case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
      case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
      case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
      case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
      case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
      case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
      case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
      case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations:
        return "TessellationEvaluationShaderInvocations";
      case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class QueryPoolCreateFlagBits
  {
  };

  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
  {
    return "(void)";
  }

  enum class QueryResultFlagBits : VkQueryResultFlags
  {
    e64               = VK_QUERY_RESULT_64_BIT,
    eWait             = VK_QUERY_RESULT_WAIT_BIT,
    eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
    ePartial          = VK_QUERY_RESULT_PARTIAL_BIT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
  };

  VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
  {
    switch ( value )
    {
      case QueryResultFlagBits::e64: return "64";
      case QueryResultFlagBits::eWait: return "Wait";
      case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
      case QueryResultFlagBits::ePartial: return "Partial";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case QueryResultFlagBits::eWithStatusKHR: return "WithStatusKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
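
  // Illustrative sketch (not part of the generated header): requesting 64-bit query results and
  // waiting until they are available, e.g. when calling Device::getQueryPoolResults (declared
  // later in this header). Assumes the default `vk` namespace.
  //
  //   vk::QueryResultFlags resultFlags = vk::QueryResultFlagBits::e64 |
  //                                      vk::QueryResultFlagBits::eWait;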

  enum class QueryType
  {
    eOcclusion          = VK_QUERY_TYPE_OCCLUSION,
    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
    eTimestamp          = VK_QUERY_TYPE_TIMESTAMP,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eTransformFeedbackStreamEXT                = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
    ePerformanceQueryKHR                       = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
    eAccelerationStructureCompactedSizeKHR     = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
    eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
    eAccelerationStructureCompactedSizeNV      = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
    ePerformanceQueryINTEL                     = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoEncodeBitstreamBufferRangeKHR = VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
  };

  VULKAN_HPP_INLINE std::string to_string( QueryType value )
  {
    switch ( value )
    {
      case QueryType::eOcclusion: return "Occlusion";
      case QueryType::ePipelineStatistics: return "PipelineStatistics";
      case QueryType::eTimestamp: return "Timestamp";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case QueryType::eResultStatusOnlyKHR: return "ResultStatusOnlyKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT";
      case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR";
      case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR";
      case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR";
      case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV";
      case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case QueryType::eVideoEncodeBitstreamBufferRangeKHR: return "VideoEncodeBitstreamBufferRangeKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class BufferCreateFlagBits : VkBufferCreateFlags
  {
    eSparseBinding                 = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
    eSparseResidency               = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
    eSparseAliased                 = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
    eProtected                     = VK_BUFFER_CREATE_PROTECTED_BIT,
    eDeviceAddressCaptureReplay    = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
    eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT,
    eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
  {
    switch ( value )
    {
      case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
      case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
      case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
      case BufferCreateFlagBits::eProtected: return "Protected";
      case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class BufferUsageFlagBits : VkBufferUsageFlags
  {
    eTransferSrc         = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
    eTransferDst         = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
    eUniformTexelBuffer  = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
    eStorageTexelBuffer  = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
    eUniformBuffer       = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
    eStorageBuffer       = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
    eIndexBuffer         = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
    eVertexBuffer        = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
    eIndirectBuffer      = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
    eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
    eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eTransformFeedbackBufferEXT                 = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
    eTransformFeedbackCounterBufferEXT          = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
    eConditionalRenderingEXT                    = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
    eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
    eAccelerationStructureStorageKHR            = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
    eShaderBindingTableKHR                      = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
    eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eRayTracingNV           = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
    eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
    eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
  {
    switch ( value )
    {
      case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
      case BufferUsageFlagBits::eTransferDst: return "TransferDst";
      case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
      case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
      case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
      case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
      case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
      case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
      case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
      case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case BufferUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
      case BufferUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT";
      case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT";
      case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
      case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR:
        return "AccelerationStructureBuildInputReadOnlyKHR";
      case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR";
      case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case BufferUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
      case BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
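
  // Illustrative sketch (not part of the generated header): a device-local vertex buffer that is
  // filled by a transfer from a staging buffer. Assumes the default `vk` namespace.
  //
  //   vk::BufferUsageFlags usage = vk::BufferUsageFlagBits::eVertexBuffer |
  //                                vk::BufferUsageFlagBits::eTransferDst;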

  enum class SharingMode
  {
    eExclusive  = VK_SHARING_MODE_EXCLUSIVE,
    eConcurrent = VK_SHARING_MODE_CONCURRENT
  };

  VULKAN_HPP_INLINE std::string to_string( SharingMode value )
  {
    switch ( value )
    {
      case SharingMode::eExclusive: return "Exclusive";
      case SharingMode::eConcurrent: return "Concurrent";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ImageLayout
  {
    eUndefined                             = VK_IMAGE_LAYOUT_UNDEFINED,
    eGeneral                               = VK_IMAGE_LAYOUT_GENERAL,
    eColorAttachmentOptimal                = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
    eDepthStencilAttachmentOptimal         = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
    eDepthStencilReadOnlyOptimal           = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
    eShaderReadOnlyOptimal                 = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
    eTransferSrcOptimal                    = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
    eTransferDstOptimal                    = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
    ePreinitialized                        = VK_IMAGE_LAYOUT_PREINITIALIZED,
    eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
    eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
    eDepthAttachmentOptimal                = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
    eDepthReadOnlyOptimal                  = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
    eStencilAttachmentOptimal              = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
    eStencilReadOnlyOptimal                = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
    ePresentSrcKHR                         = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR,
    eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR,
    eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eSharedPresentKHR                        = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
    eFragmentDensityMapOptimalEXT            = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
    eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR,
    eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR,
    eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eReadOnlyOptimalKHR                       = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR,
    eAttachmentOptimalKHR                     = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR,
    eDepthAttachmentOptimalKHR                = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
    eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
    eDepthReadOnlyOptimalKHR                  = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
    eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
    eShadingRateOptimalNV                     = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
    eStencilAttachmentOptimalKHR              = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
    eStencilReadOnlyOptimalKHR                = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
  {
    switch ( value )
    {
      case ImageLayout::eUndefined: return "Undefined";
      case ImageLayout::eGeneral: return "General";
      case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
      case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
      case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
      case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
      case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
      case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
      case ImageLayout::ePreinitialized: return "Preinitialized";
      case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal: return "DepthReadOnlyStencilAttachmentOptimal";
      case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal: return "DepthAttachmentStencilReadOnlyOptimal";
      case ImageLayout::eDepthAttachmentOptimal: return "DepthAttachmentOptimal";
      case ImageLayout::eDepthReadOnlyOptimal: return "DepthReadOnlyOptimal";
      case ImageLayout::eStencilAttachmentOptimal: return "StencilAttachmentOptimal";
      case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal";
      case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR";
      case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR";
      case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR";
      case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT";
      case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR";
      case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR";
      case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case ImageLayout::eReadOnlyOptimalKHR: return "ReadOnlyOptimalKHR";
      case ImageLayout::eAttachmentOptimalKHR: return "AttachmentOptimalKHR";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
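
  // Illustrative sketch (not part of the generated header): oldLayout/newLayout of a
  // vk::ImageMemoryBarrier (declared later in this header) for the common transition of a freshly
  // created image into a color attachment. Assumes the default `vk` namespace.
  //
  //   barrier.oldLayout = vk::ImageLayout::eUndefined;
  //   barrier.newLayout = vk::ImageLayout::eColorAttachmentOptimal;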

  enum class ComponentSwizzle
  {
    eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
    eZero     = VK_COMPONENT_SWIZZLE_ZERO,
    eOne      = VK_COMPONENT_SWIZZLE_ONE,
    eR        = VK_COMPONENT_SWIZZLE_R,
    eG        = VK_COMPONENT_SWIZZLE_G,
    eB        = VK_COMPONENT_SWIZZLE_B,
    eA        = VK_COMPONENT_SWIZZLE_A
  };

  VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value )
  {
    switch ( value )
    {
      case ComponentSwizzle::eIdentity: return "Identity";
      case ComponentSwizzle::eZero: return "Zero";
      case ComponentSwizzle::eOne: return "One";
      case ComponentSwizzle::eR: return "R";
      case ComponentSwizzle::eG: return "G";
      case ComponentSwizzle::eB: return "B";
      case ComponentSwizzle::eA: return "A";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ImageViewCreateFlagBits : VkImageViewCreateFlags
  {
    eFragmentDensityMapDynamicEXT  = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT,
    eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
  {
    switch ( value )
    {
      case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT";
      case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT: return "FragmentDensityMapDeferredEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ImageViewType
  {
    e1D        = VK_IMAGE_VIEW_TYPE_1D,
    e2D        = VK_IMAGE_VIEW_TYPE_2D,
    e3D        = VK_IMAGE_VIEW_TYPE_3D,
    eCube      = VK_IMAGE_VIEW_TYPE_CUBE,
    e1DArray   = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
    e2DArray   = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
    eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
  };

  VULKAN_HPP_INLINE std::string to_string( ImageViewType value )
  {
    switch ( value )
    {
      case ImageViewType::e1D: return "1D";
      case ImageViewType::e2D: return "2D";
      case ImageViewType::e3D: return "3D";
      case ImageViewType::eCube: return "Cube";
      case ImageViewType::e1DArray: return "1DArray";
      case ImageViewType::e2DArray: return "2DArray";
      case ImageViewType::eCubeArray: return "CubeArray";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
  {
    return "(void)";
  }

  enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags
  {
    eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value )
  {
    switch ( value )
    {
      case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT: return "ExternallySynchronizedEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class BlendFactor
  {
    eZero                  = VK_BLEND_FACTOR_ZERO,
    eOne                   = VK_BLEND_FACTOR_ONE,
    eSrcColor              = VK_BLEND_FACTOR_SRC_COLOR,
    eOneMinusSrcColor      = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
    eDstColor              = VK_BLEND_FACTOR_DST_COLOR,
    eOneMinusDstColor      = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
    eSrcAlpha              = VK_BLEND_FACTOR_SRC_ALPHA,
    eOneMinusSrcAlpha      = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
    eDstAlpha              = VK_BLEND_FACTOR_DST_ALPHA,
    eOneMinusDstAlpha      = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
    eConstantColor         = VK_BLEND_FACTOR_CONSTANT_COLOR,
    eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
    eConstantAlpha         = VK_BLEND_FACTOR_CONSTANT_ALPHA,
    eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
    eSrcAlphaSaturate      = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
    eSrc1Color             = VK_BLEND_FACTOR_SRC1_COLOR,
    eOneMinusSrc1Color     = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
    eSrc1Alpha             = VK_BLEND_FACTOR_SRC1_ALPHA,
    eOneMinusSrc1Alpha     = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
  };

  VULKAN_HPP_INLINE std::string to_string( BlendFactor value )
  {
    switch ( value )
    {
      case BlendFactor::eZero: return "Zero";
      case BlendFactor::eOne: return "One";
      case BlendFactor::eSrcColor: return "SrcColor";
      case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
      case BlendFactor::eDstColor: return "DstColor";
      case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
      case BlendFactor::eSrcAlpha: return "SrcAlpha";
      case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
      case BlendFactor::eDstAlpha: return "DstAlpha";
      case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
      case BlendFactor::eConstantColor: return "ConstantColor";
      case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
      case BlendFactor::eConstantAlpha: return "ConstantAlpha";
      case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
      case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
      case BlendFactor::eSrc1Color: return "Src1Color";
      case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
      case BlendFactor::eSrc1Alpha: return "Src1Alpha";
      case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class BlendOp
  {
    eAdd                 = VK_BLEND_OP_ADD,
    eSubtract            = VK_BLEND_OP_SUBTRACT,
    eReverseSubtract     = VK_BLEND_OP_REVERSE_SUBTRACT,
    eMin                 = VK_BLEND_OP_MIN,
    eMax                 = VK_BLEND_OP_MAX,
    eZeroEXT             = VK_BLEND_OP_ZERO_EXT,
    eSrcEXT              = VK_BLEND_OP_SRC_EXT,
    eDstEXT              = VK_BLEND_OP_DST_EXT,
    eSrcOverEXT          = VK_BLEND_OP_SRC_OVER_EXT,
    eDstOverEXT          = VK_BLEND_OP_DST_OVER_EXT,
    eSrcInEXT            = VK_BLEND_OP_SRC_IN_EXT,
    eDstInEXT            = VK_BLEND_OP_DST_IN_EXT,
    eSrcOutEXT           = VK_BLEND_OP_SRC_OUT_EXT,
    eDstOutEXT           = VK_BLEND_OP_DST_OUT_EXT,
    eSrcAtopEXT          = VK_BLEND_OP_SRC_ATOP_EXT,
    eDstAtopEXT          = VK_BLEND_OP_DST_ATOP_EXT,
    eXorEXT              = VK_BLEND_OP_XOR_EXT,
    eMultiplyEXT         = VK_BLEND_OP_MULTIPLY_EXT,
    eScreenEXT           = VK_BLEND_OP_SCREEN_EXT,
    eOverlayEXT          = VK_BLEND_OP_OVERLAY_EXT,
    eDarkenEXT           = VK_BLEND_OP_DARKEN_EXT,
    eLightenEXT          = VK_BLEND_OP_LIGHTEN_EXT,
    eColordodgeEXT       = VK_BLEND_OP_COLORDODGE_EXT,
    eColorburnEXT        = VK_BLEND_OP_COLORBURN_EXT,
    eHardlightEXT        = VK_BLEND_OP_HARDLIGHT_EXT,
    eSoftlightEXT        = VK_BLEND_OP_SOFTLIGHT_EXT,
    eDifferenceEXT       = VK_BLEND_OP_DIFFERENCE_EXT,
    eExclusionEXT        = VK_BLEND_OP_EXCLUSION_EXT,
    eInvertEXT           = VK_BLEND_OP_INVERT_EXT,
    eInvertRgbEXT        = VK_BLEND_OP_INVERT_RGB_EXT,
    eLineardodgeEXT      = VK_BLEND_OP_LINEARDODGE_EXT,
    eLinearburnEXT       = VK_BLEND_OP_LINEARBURN_EXT,
    eVividlightEXT       = VK_BLEND_OP_VIVIDLIGHT_EXT,
    eLinearlightEXT      = VK_BLEND_OP_LINEARLIGHT_EXT,
    ePinlightEXT         = VK_BLEND_OP_PINLIGHT_EXT,
    eHardmixEXT          = VK_BLEND_OP_HARDMIX_EXT,
    eHslHueEXT           = VK_BLEND_OP_HSL_HUE_EXT,
    eHslSaturationEXT    = VK_BLEND_OP_HSL_SATURATION_EXT,
    eHslColorEXT         = VK_BLEND_OP_HSL_COLOR_EXT,
    eHslLuminosityEXT    = VK_BLEND_OP_HSL_LUMINOSITY_EXT,
    ePlusEXT             = VK_BLEND_OP_PLUS_EXT,
    ePlusClampedEXT      = VK_BLEND_OP_PLUS_CLAMPED_EXT,
    ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT,
    ePlusDarkerEXT       = VK_BLEND_OP_PLUS_DARKER_EXT,
    eMinusEXT            = VK_BLEND_OP_MINUS_EXT,
    eMinusClampedEXT     = VK_BLEND_OP_MINUS_CLAMPED_EXT,
    eContrastEXT         = VK_BLEND_OP_CONTRAST_EXT,
    eInvertOvgEXT        = VK_BLEND_OP_INVERT_OVG_EXT,
    eRedEXT              = VK_BLEND_OP_RED_EXT,
    eGreenEXT            = VK_BLEND_OP_GREEN_EXT,
    eBlueEXT             = VK_BLEND_OP_BLUE_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( BlendOp value )
  {
    switch ( value )
    {
      case BlendOp::eAdd: return "Add";
      case BlendOp::eSubtract: return "Subtract";
      case BlendOp::eReverseSubtract: return "ReverseSubtract";
      case BlendOp::eMin: return "Min";
      case BlendOp::eMax: return "Max";
      case BlendOp::eZeroEXT: return "ZeroEXT";
      case BlendOp::eSrcEXT: return "SrcEXT";
      case BlendOp::eDstEXT: return "DstEXT";
      case BlendOp::eSrcOverEXT: return "SrcOverEXT";
      case BlendOp::eDstOverEXT: return "DstOverEXT";
      case BlendOp::eSrcInEXT: return "SrcInEXT";
      case BlendOp::eDstInEXT: return "DstInEXT";
      case BlendOp::eSrcOutEXT: return "SrcOutEXT";
      case BlendOp::eDstOutEXT: return "DstOutEXT";
      case BlendOp::eSrcAtopEXT: return "SrcAtopEXT";
      case BlendOp::eDstAtopEXT: return "DstAtopEXT";
      case BlendOp::eXorEXT: return "XorEXT";
      case BlendOp::eMultiplyEXT: return "MultiplyEXT";
      case BlendOp::eScreenEXT: return "ScreenEXT";
      case BlendOp::eOverlayEXT: return "OverlayEXT";
      case BlendOp::eDarkenEXT: return "DarkenEXT";
      case BlendOp::eLightenEXT: return "LightenEXT";
      case BlendOp::eColordodgeEXT: return "ColordodgeEXT";
      case BlendOp::eColorburnEXT: return "ColorburnEXT";
      case BlendOp::eHardlightEXT: return "HardlightEXT";
      case BlendOp::eSoftlightEXT: return "SoftlightEXT";
      case BlendOp::eDifferenceEXT: return "DifferenceEXT";
      case BlendOp::eExclusionEXT: return "ExclusionEXT";
      case BlendOp::eInvertEXT: return "InvertEXT";
      case BlendOp::eInvertRgbEXT: return "InvertRgbEXT";
      case BlendOp::eLineardodgeEXT: return "LineardodgeEXT";
      case BlendOp::eLinearburnEXT: return "LinearburnEXT";
      case BlendOp::eVividlightEXT: return "VividlightEXT";
      case BlendOp::eLinearlightEXT: return "LinearlightEXT";
      case BlendOp::ePinlightEXT: return "PinlightEXT";
      case BlendOp::eHardmixEXT: return "HardmixEXT";
      case BlendOp::eHslHueEXT: return "HslHueEXT";
      case BlendOp::eHslSaturationEXT: return "HslSaturationEXT";
      case BlendOp::eHslColorEXT: return "HslColorEXT";
      case BlendOp::eHslLuminosityEXT: return "HslLuminosityEXT";
      case BlendOp::ePlusEXT: return "PlusEXT";
      case BlendOp::ePlusClampedEXT: return "PlusClampedEXT";
      case BlendOp::ePlusClampedAlphaEXT: return "PlusClampedAlphaEXT";
      case BlendOp::ePlusDarkerEXT: return "PlusDarkerEXT";
      case BlendOp::eMinusEXT: return "MinusEXT";
      case BlendOp::eMinusClampedEXT: return "MinusClampedEXT";
      case BlendOp::eContrastEXT: return "ContrastEXT";
      case BlendOp::eInvertOvgEXT: return "InvertOvgEXT";
      case BlendOp::eRedEXT: return "RedEXT";
      case BlendOp::eGreenEXT: return "GreenEXT";
      case BlendOp::eBlueEXT: return "BlueEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
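
  // Illustrative sketch (not part of the generated header): standard "source over" alpha blending
  // expressed with BlendFactor and BlendOp on a vk::PipelineColorBlendAttachmentState (declared
  // later in this header). Assumes the default `vk` namespace.
  //
  //   att.srcColorBlendFactor = vk::BlendFactor::eSrcAlpha;
  //   att.dstColorBlendFactor = vk::BlendFactor::eOneMinusSrcAlpha;
  //   att.colorBlendOp        = vk::BlendOp::eAdd;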

  enum class ColorComponentFlagBits : VkColorComponentFlags
  {
    eR = VK_COLOR_COMPONENT_R_BIT,
    eG = VK_COLOR_COMPONENT_G_BIT,
    eB = VK_COLOR_COMPONENT_B_BIT,
    eA = VK_COLOR_COMPONENT_A_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
  {
    switch ( value )
    {
      case ColorComponentFlagBits::eR: return "R";
      case ColorComponentFlagBits::eG: return "G";
      case ColorComponentFlagBits::eB: return "B";
      case ColorComponentFlagBits::eA: return "A";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class CompareOp
  {
    eNever          = VK_COMPARE_OP_NEVER,
    eLess           = VK_COMPARE_OP_LESS,
    eEqual          = VK_COMPARE_OP_EQUAL,
    eLessOrEqual    = VK_COMPARE_OP_LESS_OR_EQUAL,
    eGreater        = VK_COMPARE_OP_GREATER,
    eNotEqual       = VK_COMPARE_OP_NOT_EQUAL,
    eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
    eAlways         = VK_COMPARE_OP_ALWAYS
  };

  VULKAN_HPP_INLINE std::string to_string( CompareOp value )
  {
    switch ( value )
    {
      case CompareOp::eNever: return "Never";
      case CompareOp::eLess: return "Less";
      case CompareOp::eEqual: return "Equal";
      case CompareOp::eLessOrEqual: return "LessOrEqual";
      case CompareOp::eGreater: return "Greater";
      case CompareOp::eNotEqual: return "NotEqual";
      case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
      case CompareOp::eAlways: return "Always";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class CullModeFlagBits : VkCullModeFlags
  {
    eNone         = VK_CULL_MODE_NONE,
    eFront        = VK_CULL_MODE_FRONT_BIT,
    eBack         = VK_CULL_MODE_BACK_BIT,
    eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
  };

  VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
  {
    switch ( value )
    {
      case CullModeFlagBits::eNone: return "None";
      case CullModeFlagBits::eFront: return "Front";
      case CullModeFlagBits::eBack: return "Back";
      case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DynamicState
  {
    eViewport                       = VK_DYNAMIC_STATE_VIEWPORT,
    eScissor                        = VK_DYNAMIC_STATE_SCISSOR,
    eLineWidth                      = VK_DYNAMIC_STATE_LINE_WIDTH,
    eDepthBias                      = VK_DYNAMIC_STATE_DEPTH_BIAS,
    eBlendConstants                 = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
    eDepthBounds                    = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
    eStencilCompareMask             = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
    eStencilWriteMask               = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
    eStencilReference               = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
    eViewportWScalingNV             = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
    eDiscardRectangleEXT            = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
    eSampleLocationsEXT             = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
    eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
    eViewportShadingRatePaletteNV   = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
    eViewportCoarseSampleOrderNV    = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
    eExclusiveScissorNV             = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
    eFragmentShadingRateKHR         = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
    eLineStippleEXT                 = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
    eCullModeEXT                    = VK_DYNAMIC_STATE_CULL_MODE_EXT,
    eFrontFaceEXT                   = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
    ePrimitiveTopologyEXT           = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
    eViewportWithCountEXT           = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT,
    eScissorWithCountEXT            = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
    eVertexInputBindingStrideEXT    = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
    eDepthTestEnableEXT             = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
    eDepthWriteEnableEXT            = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
    eDepthCompareOpEXT              = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
    eDepthBoundsTestEnableEXT       = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
    eStencilTestEnableEXT           = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
    eStencilOpEXT                   = VK_DYNAMIC_STATE_STENCIL_OP_EXT,
    eVertexInputEXT                 = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT,
    ePatchControlPointsEXT          = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT,
    eRasterizerDiscardEnableEXT     = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT,
    eDepthBiasEnableEXT             = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT,
    eLogicOpEXT                     = VK_DYNAMIC_STATE_LOGIC_OP_EXT,
    ePrimitiveRestartEnableEXT      = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT,
    eColorWriteEnableEXT            = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DynamicState value )
  {
    switch ( value )
    {
      case DynamicState::eViewport: return "Viewport";
      case DynamicState::eScissor: return "Scissor";
      case DynamicState::eLineWidth: return "LineWidth";
      case DynamicState::eDepthBias: return "DepthBias";
      case DynamicState::eBlendConstants: return "BlendConstants";
      case DynamicState::eDepthBounds: return "DepthBounds";
      case DynamicState::eStencilCompareMask: return "StencilCompareMask";
      case DynamicState::eStencilWriteMask: return "StencilWriteMask";
      case DynamicState::eStencilReference: return "StencilReference";
      case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV";
      case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT";
      case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT";
      case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR";
      case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV";
      case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV";
      case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV";
      case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR";
      case DynamicState::eLineStippleEXT: return "LineStippleEXT";
      case DynamicState::eCullModeEXT: return "CullModeEXT";
      case DynamicState::eFrontFaceEXT: return "FrontFaceEXT";
      case DynamicState::ePrimitiveTopologyEXT: return "PrimitiveTopologyEXT";
      case DynamicState::eViewportWithCountEXT: return "ViewportWithCountEXT";
      case DynamicState::eScissorWithCountEXT: return "ScissorWithCountEXT";
      case DynamicState::eVertexInputBindingStrideEXT: return "VertexInputBindingStrideEXT";
      case DynamicState::eDepthTestEnableEXT: return "DepthTestEnableEXT";
      case DynamicState::eDepthWriteEnableEXT: return "DepthWriteEnableEXT";
      case DynamicState::eDepthCompareOpEXT: return "DepthCompareOpEXT";
      case DynamicState::eDepthBoundsTestEnableEXT: return "DepthBoundsTestEnableEXT";
      case DynamicState::eStencilTestEnableEXT: return "StencilTestEnableEXT";
      case DynamicState::eStencilOpEXT: return "StencilOpEXT";
      case DynamicState::eVertexInputEXT: return "VertexInputEXT";
      case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT";
      case DynamicState::eRasterizerDiscardEnableEXT: return "RasterizerDiscardEnableEXT";
      case DynamicState::eDepthBiasEnableEXT: return "DepthBiasEnableEXT";
      case DynamicState::eLogicOpEXT: return "LogicOpEXT";
      case DynamicState::ePrimitiveRestartEnableEXT: return "PrimitiveRestartEnableEXT";
      case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
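
  // Editorial note with an illustrative sketch (not part of the generated registry output): each
  // to_string overload in this header returns the enumerator name without its leading 'e', and
  // unknown values fall through to the "invalid ( <hex> )" branch. Assuming the default
  // VULKAN_HPP_NAMESPACE alias 'vk':
  //
  //   std::string name = vk::to_string( vk::DynamicState::eLineWidth );   // "LineWidth"
  //   // any value without a case label above yields the "invalid ( ... )" fallback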

  enum class FrontFace
  {
    eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
    eClockwise        = VK_FRONT_FACE_CLOCKWISE
  };

  VULKAN_HPP_INLINE std::string to_string( FrontFace value )
  {
    switch ( value )
    {
      case FrontFace::eCounterClockwise: return "CounterClockwise";
      case FrontFace::eClockwise: return "Clockwise";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class LogicOp
  {
    eClear        = VK_LOGIC_OP_CLEAR,
    eAnd          = VK_LOGIC_OP_AND,
    eAndReverse   = VK_LOGIC_OP_AND_REVERSE,
    eCopy         = VK_LOGIC_OP_COPY,
    eAndInverted  = VK_LOGIC_OP_AND_INVERTED,
    eNoOp         = VK_LOGIC_OP_NO_OP,
    eXor          = VK_LOGIC_OP_XOR,
    eOr           = VK_LOGIC_OP_OR,
    eNor          = VK_LOGIC_OP_NOR,
    eEquivalent   = VK_LOGIC_OP_EQUIVALENT,
    eInvert       = VK_LOGIC_OP_INVERT,
    eOrReverse    = VK_LOGIC_OP_OR_REVERSE,
    eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
    eOrInverted   = VK_LOGIC_OP_OR_INVERTED,
    eNand         = VK_LOGIC_OP_NAND,
    eSet          = VK_LOGIC_OP_SET
  };

  VULKAN_HPP_INLINE std::string to_string( LogicOp value )
  {
    switch ( value )
    {
      case LogicOp::eClear: return "Clear";
      case LogicOp::eAnd: return "And";
      case LogicOp::eAndReverse: return "AndReverse";
      case LogicOp::eCopy: return "Copy";
      case LogicOp::eAndInverted: return "AndInverted";
      case LogicOp::eNoOp: return "NoOp";
      case LogicOp::eXor: return "Xor";
      case LogicOp::eOr: return "Or";
      case LogicOp::eNor: return "Nor";
      case LogicOp::eEquivalent: return "Equivalent";
      case LogicOp::eInvert: return "Invert";
      case LogicOp::eOrReverse: return "OrReverse";
      case LogicOp::eCopyInverted: return "CopyInverted";
      case LogicOp::eOrInverted: return "OrInverted";
      case LogicOp::eNand: return "Nand";
      case LogicOp::eSet: return "Set";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PipelineCreateFlagBits : VkPipelineCreateFlags
  {
    eDisableOptimization                    = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
    eAllowDerivatives                       = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
    eDerivative                             = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
    eViewIndexFromDeviceIndex               = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
    eDispatchBase                           = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
    eRayTracingNoNullAnyHitShadersKHR       = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
    eRayTracingNoNullClosestHitShadersKHR   = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
    eRayTracingNoNullMissShadersKHR         = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
    eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
    eRayTracingSkipTrianglesKHR             = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
    eRayTracingSkipAabbsKHR                 = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR,
    eRayTracingShaderGroupHandleCaptureReplayKHR =
      VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR,
    eDeferCompileNV                    = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
    eCaptureStatisticsKHR              = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
    eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
    eIndirectBindableNV                = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV,
    eLibraryKHR                        = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR,
    eFailOnPipelineCompileRequiredEXT  = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
    eEarlyReturnOnFailureEXT           = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT,
    eDispatchBaseKHR                   = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
    eViewIndexFromDeviceIndexKHR       = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
  {
    switch ( value )
    {
      case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization";
      case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives";
      case PipelineCreateFlagBits::eDerivative: return "Derivative";
      case PipelineCreateFlagBits::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex";
      case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase";
      case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR";
      case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR:
        return "RayTracingNoNullClosestHitShadersKHR";
      case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR";
      case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR:
        return "RayTracingNoNullIntersectionShadersKHR";
      case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR";
      case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR";
      case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR:
        return "RayTracingShaderGroupHandleCaptureReplayKHR";
      case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV";
      case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR";
      case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR";
      case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV";
      case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR";
      case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT: return "FailOnPipelineCompileRequiredEXT";
      case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT: return "EarlyReturnOnFailureEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
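
  // Editorial note (not generated): eDispatchBaseKHR and eViewIndexFromDeviceIndexKHR alias the
  // corresponding core bits and share their numeric values, so duplicate case labels are omitted
  // from the switch above. Illustrative use of the bit type, assuming the 'vk' namespace alias:
  //
  //   vk::PipelineCreateFlags flags =
  //     vk::PipelineCreateFlagBits::eDisableOptimization | vk::PipelineCreateFlagBits::eAllowDerivatives;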

  enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags
  {
    eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
    eRequireFullSubgroupsEXT     = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value )
  {
    switch ( value )
    {
      case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT: return "AllowVaryingSubgroupSizeEXT";
      case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT: return "RequireFullSubgroupsEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PolygonMode
  {
    eFill            = VK_POLYGON_MODE_FILL,
    eLine            = VK_POLYGON_MODE_LINE,
    ePoint           = VK_POLYGON_MODE_POINT,
    eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
  };

  VULKAN_HPP_INLINE std::string to_string( PolygonMode value )
  {
    switch ( value )
    {
      case PolygonMode::eFill: return "Fill";
      case PolygonMode::eLine: return "Line";
      case PolygonMode::ePoint: return "Point";
      case PolygonMode::eFillRectangleNV: return "FillRectangleNV";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PrimitiveTopology
  {
    ePointList                  = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
    eLineList                   = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
    eLineStrip                  = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
    eTriangleList               = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
    eTriangleStrip              = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
    eTriangleFan                = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
    eLineListWithAdjacency      = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
    eLineStripWithAdjacency     = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
    eTriangleListWithAdjacency  = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
    eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
    ePatchList                  = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
  };

  VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value )
  {
    switch ( value )
    {
      case PrimitiveTopology::ePointList: return "PointList";
      case PrimitiveTopology::eLineList: return "LineList";
      case PrimitiveTopology::eLineStrip: return "LineStrip";
      case PrimitiveTopology::eTriangleList: return "TriangleList";
      case PrimitiveTopology::eTriangleStrip: return "TriangleStrip";
      case PrimitiveTopology::eTriangleFan: return "TriangleFan";
      case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency";
      case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency";
      case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency";
      case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency";
      case PrimitiveTopology::ePatchList: return "PatchList";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ShaderStageFlagBits : VkShaderStageFlags
  {
    eVertex                 = VK_SHADER_STAGE_VERTEX_BIT,
    eTessellationControl    = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
    eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
    eGeometry               = VK_SHADER_STAGE_GEOMETRY_BIT,
    eFragment               = VK_SHADER_STAGE_FRAGMENT_BIT,
    eCompute                = VK_SHADER_STAGE_COMPUTE_BIT,
    eAllGraphics            = VK_SHADER_STAGE_ALL_GRAPHICS,
    eAll                    = VK_SHADER_STAGE_ALL,
    eRaygenKHR              = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
    eAnyHitKHR              = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
    eClosestHitKHR          = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
    eMissKHR                = VK_SHADER_STAGE_MISS_BIT_KHR,
    eIntersectionKHR        = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
    eCallableKHR            = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
    eTaskNV                 = VK_SHADER_STAGE_TASK_BIT_NV,
    eMeshNV                 = VK_SHADER_STAGE_MESH_BIT_NV,
    eAnyHitNV               = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
    eCallableNV             = VK_SHADER_STAGE_CALLABLE_BIT_NV,
    eClosestHitNV           = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
    eIntersectionNV         = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
    eMissNV                 = VK_SHADER_STAGE_MISS_BIT_NV,
    eRaygenNV               = VK_SHADER_STAGE_RAYGEN_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
  {
    switch ( value )
    {
      case ShaderStageFlagBits::eVertex: return "Vertex";
      case ShaderStageFlagBits::eTessellationControl: return "TessellationControl";
      case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation";
      case ShaderStageFlagBits::eGeometry: return "Geometry";
      case ShaderStageFlagBits::eFragment: return "Fragment";
      case ShaderStageFlagBits::eCompute: return "Compute";
      case ShaderStageFlagBits::eAllGraphics: return "AllGraphics";
      case ShaderStageFlagBits::eAll: return "All";
      case ShaderStageFlagBits::eRaygenKHR: return "RaygenKHR";
      case ShaderStageFlagBits::eAnyHitKHR: return "AnyHitKHR";
      case ShaderStageFlagBits::eClosestHitKHR: return "ClosestHitKHR";
      case ShaderStageFlagBits::eMissKHR: return "MissKHR";
      case ShaderStageFlagBits::eIntersectionKHR: return "IntersectionKHR";
      case ShaderStageFlagBits::eCallableKHR: return "CallableKHR";
      case ShaderStageFlagBits::eTaskNV: return "TaskNV";
      case ShaderStageFlagBits::eMeshNV: return "MeshNV";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
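
  // Illustrative sketch (not generated): shader stage bits combine into a ShaderStageFlags mask,
  // e.g. for a push-constant range covering the vertex and fragment stages. The NV ray tracing
  // enumerators above alias their KHR counterparts and therefore have no case labels of their own.
  //
  //   vk::ShaderStageFlags stages = vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment;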

  enum class StencilOp
  {
    eKeep              = VK_STENCIL_OP_KEEP,
    eZero              = VK_STENCIL_OP_ZERO,
    eReplace           = VK_STENCIL_OP_REPLACE,
    eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
    eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
    eInvert            = VK_STENCIL_OP_INVERT,
    eIncrementAndWrap  = VK_STENCIL_OP_INCREMENT_AND_WRAP,
    eDecrementAndWrap  = VK_STENCIL_OP_DECREMENT_AND_WRAP
  };

  VULKAN_HPP_INLINE std::string to_string( StencilOp value )
  {
    switch ( value )
    {
      case StencilOp::eKeep: return "Keep";
      case StencilOp::eZero: return "Zero";
      case StencilOp::eReplace: return "Replace";
      case StencilOp::eIncrementAndClamp: return "IncrementAndClamp";
      case StencilOp::eDecrementAndClamp: return "DecrementAndClamp";
      case StencilOp::eInvert: return "Invert";
      case StencilOp::eIncrementAndWrap: return "IncrementAndWrap";
      case StencilOp::eDecrementAndWrap: return "DecrementAndWrap";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VertexInputRate
  {
    eVertex   = VK_VERTEX_INPUT_RATE_VERTEX,
    eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
  };

  VULKAN_HPP_INLINE std::string to_string( VertexInputRate value )
  {
    switch ( value )
    {
      case VertexInputRate::eVertex: return "Vertex";
      case VertexInputRate::eInstance: return "Instance";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class BorderColor
  {
    eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
    eIntTransparentBlack   = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
    eFloatOpaqueBlack      = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
    eIntOpaqueBlack        = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
    eFloatOpaqueWhite      = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
    eIntOpaqueWhite        = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
    eFloatCustomEXT        = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT,
    eIntCustomEXT          = VK_BORDER_COLOR_INT_CUSTOM_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( BorderColor value )
  {
    switch ( value )
    {
      case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack";
      case BorderColor::eIntTransparentBlack: return "IntTransparentBlack";
      case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack";
      case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack";
      case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite";
      case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite";
      case BorderColor::eFloatCustomEXT: return "FloatCustomEXT";
      case BorderColor::eIntCustomEXT: return "IntCustomEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class Filter
  {
    eNearest  = VK_FILTER_NEAREST,
    eLinear   = VK_FILTER_LINEAR,
    eCubicIMG = VK_FILTER_CUBIC_IMG,
    eCubicEXT = VK_FILTER_CUBIC_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( Filter value )
  {
    switch ( value )
    {
      case Filter::eNearest: return "Nearest";
      case Filter::eLinear: return "Linear";
      case Filter::eCubicIMG: return "CubicIMG";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SamplerAddressMode
  {
    eRepeat               = VK_SAMPLER_ADDRESS_MODE_REPEAT,
    eMirroredRepeat       = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
    eClampToEdge          = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
    eClampToBorder        = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
    eMirrorClampToEdge    = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
    eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value )
  {
    switch ( value )
    {
      case SamplerAddressMode::eRepeat: return "Repeat";
      case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat";
      case SamplerAddressMode::eClampToEdge: return "ClampToEdge";
      case SamplerAddressMode::eClampToBorder: return "ClampToBorder";
      case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SamplerCreateFlagBits : VkSamplerCreateFlags
  {
    eSubsampledEXT                     = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
    eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
  {
    switch ( value )
    {
      case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT";
      case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SamplerMipmapMode
  {
    eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
    eLinear  = VK_SAMPLER_MIPMAP_MODE_LINEAR
  };

  VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value )
  {
    switch ( value )
    {
      case SamplerMipmapMode::eNearest: return "Nearest";
      case SamplerMipmapMode::eLinear: return "Linear";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags
  {
    eFreeDescriptorSet  = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
    eUpdateAfterBind    = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
    eHostOnlyVALVE      = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE,
    eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value )
  {
    switch ( value )
    {
      case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
      case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
      case DescriptorPoolCreateFlagBits::eHostOnlyVALVE: return "HostOnlyVALVE";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags
  {
    eUpdateAfterBindPool    = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
    ePushDescriptorKHR      = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
    eHostOnlyPoolVALVE      = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE,
    eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )
  {
    switch ( value )
    {
      case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool";
      case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR";
      case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE: return "HostOnlyPoolVALVE";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DescriptorType
  {
    eSampler                  = VK_DESCRIPTOR_TYPE_SAMPLER,
    eCombinedImageSampler     = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
    eSampledImage             = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
    eStorageImage             = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
    eUniformTexelBuffer       = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
    eStorageTexelBuffer       = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
    eUniformBuffer            = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
    eStorageBuffer            = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
    eUniformBufferDynamic     = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
    eStorageBufferDynamic     = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
    eInputAttachment          = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
    eInlineUniformBlockEXT    = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
    eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
    eAccelerationStructureNV  = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV,
    eMutableVALVE             = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE
  };

  VULKAN_HPP_INLINE std::string to_string( DescriptorType value )
  {
    switch ( value )
    {
      case DescriptorType::eSampler: return "Sampler";
      case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
      case DescriptorType::eSampledImage: return "SampledImage";
      case DescriptorType::eStorageImage: return "StorageImage";
      case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
      case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
      case DescriptorType::eUniformBuffer: return "UniformBuffer";
      case DescriptorType::eStorageBuffer: return "StorageBuffer";
      case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
      case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
      case DescriptorType::eInputAttachment: return "InputAttachment";
      case DescriptorType::eInlineUniformBlockEXT: return "InlineUniformBlockEXT";
      case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR";
      case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV";
      case DescriptorType::eMutableVALVE: return "MutableVALVE";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class AccessFlagBits : VkAccessFlags
  {
    eIndirectCommandRead                  = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
    eIndexRead                            = VK_ACCESS_INDEX_READ_BIT,
    eVertexAttributeRead                  = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
    eUniformRead                          = VK_ACCESS_UNIFORM_READ_BIT,
    eInputAttachmentRead                  = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
    eShaderRead                           = VK_ACCESS_SHADER_READ_BIT,
    eShaderWrite                          = VK_ACCESS_SHADER_WRITE_BIT,
    eColorAttachmentRead                  = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
    eColorAttachmentWrite                 = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
    eDepthStencilAttachmentRead           = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
    eDepthStencilAttachmentWrite          = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
    eTransferRead                         = VK_ACCESS_TRANSFER_READ_BIT,
    eTransferWrite                        = VK_ACCESS_TRANSFER_WRITE_BIT,
    eHostRead                             = VK_ACCESS_HOST_READ_BIT,
    eHostWrite                            = VK_ACCESS_HOST_WRITE_BIT,
    eMemoryRead                           = VK_ACCESS_MEMORY_READ_BIT,
    eMemoryWrite                          = VK_ACCESS_MEMORY_WRITE_BIT,
    eTransformFeedbackWriteEXT            = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
    eTransformFeedbackCounterReadEXT      = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
    eTransformFeedbackCounterWriteEXT     = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
    eConditionalRenderingReadEXT          = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
    eColorAttachmentReadNoncoherentEXT    = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
    eAccelerationStructureReadKHR         = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
    eAccelerationStructureWriteKHR        = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
    eFragmentDensityMapReadEXT            = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
    eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
    eCommandPreprocessReadNV              = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV,
    eCommandPreprocessWriteNV             = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV,
    eNoneKHR                              = VK_ACCESS_NONE_KHR,
    eAccelerationStructureReadNV          = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
    eAccelerationStructureWriteNV         = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
    eShadingRateImageReadNV               = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
  {
    switch ( value )
    {
      case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead";
      case AccessFlagBits::eIndexRead: return "IndexRead";
      case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead";
      case AccessFlagBits::eUniformRead: return "UniformRead";
      case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead";
      case AccessFlagBits::eShaderRead: return "ShaderRead";
      case AccessFlagBits::eShaderWrite: return "ShaderWrite";
      case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead";
      case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite";
      case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
      case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
      case AccessFlagBits::eTransferRead: return "TransferRead";
      case AccessFlagBits::eTransferWrite: return "TransferWrite";
      case AccessFlagBits::eHostRead: return "HostRead";
      case AccessFlagBits::eHostWrite: return "HostWrite";
      case AccessFlagBits::eMemoryRead: return "MemoryRead";
      case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
      case AccessFlagBits::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
      case AccessFlagBits::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
      case AccessFlagBits::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
      case AccessFlagBits::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
      case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
      case AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR";
      case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR";
      case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT";
      case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR";
      case AccessFlagBits::eCommandPreprocessReadNV: return "CommandPreprocessReadNV";
      case AccessFlagBits::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV";
      case AccessFlagBits::eNoneKHR: return "NoneKHR";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
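
  // Illustrative sketch (not generated): access bits are typically paired as source/destination
  // masks in a memory barrier, for example making a transfer write visible to a shader read:
  //
  //   vk::AccessFlags srcAccess = vk::AccessFlagBits::eTransferWrite;
  //   vk::AccessFlags dstAccess = vk::AccessFlagBits::eShaderRead;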

  enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags
  {
    eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value )
  {
    switch ( value )
    {
      case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class AttachmentLoadOp
  {
    eLoad     = VK_ATTACHMENT_LOAD_OP_LOAD,
    eClear    = VK_ATTACHMENT_LOAD_OP_CLEAR,
    eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE
  };

  VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value )
  {
    switch ( value )
    {
      case AttachmentLoadOp::eLoad: return "Load";
      case AttachmentLoadOp::eClear: return "Clear";
      case AttachmentLoadOp::eDontCare: return "DontCare";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class AttachmentStoreOp
  {
    eStore    = VK_ATTACHMENT_STORE_OP_STORE,
    eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE,
    eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM
  };

  VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value )
  {
    switch ( value )
    {
      case AttachmentStoreOp::eStore: return "Store";
      case AttachmentStoreOp::eDontCare: return "DontCare";
      case AttachmentStoreOp::eNoneQCOM: return "NoneQCOM";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DependencyFlagBits : VkDependencyFlags
  {
    eByRegion       = VK_DEPENDENCY_BY_REGION_BIT,
    eDeviceGroup    = VK_DEPENDENCY_DEVICE_GROUP_BIT,
    eViewLocal      = VK_DEPENDENCY_VIEW_LOCAL_BIT,
    eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR,
    eViewLocalKHR   = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
  {
    switch ( value )
    {
      case DependencyFlagBits::eByRegion: return "ByRegion";
      case DependencyFlagBits::eDeviceGroup: return "DeviceGroup";
      case DependencyFlagBits::eViewLocal: return "ViewLocal";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags
  {
    eImageless    = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
    eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
  {
    switch ( value )
    {
      case FramebufferCreateFlagBits::eImageless: return "Imageless";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PipelineBindPoint
  {
    eGraphics      = VK_PIPELINE_BIND_POINT_GRAPHICS,
    eCompute       = VK_PIPELINE_BIND_POINT_COMPUTE,
    eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
    eRayTracingNV  = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value )
  {
    switch ( value )
    {
      case PipelineBindPoint::eGraphics: return "Graphics";
      case PipelineBindPoint::eCompute: return "Compute";
      case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags
  {
    eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM
  };

  VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value )
  {
    switch ( value )
    {
      case RenderPassCreateFlagBits::eTransformQCOM: return "TransformQCOM";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags
  {
    ePerViewAttributesNVX    = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
    ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX,
    eFragmentRegionQCOM      = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM,
    eShaderResolveQCOM       = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM
  };

  VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
  {
    switch ( value )
    {
      case SubpassDescriptionFlagBits::ePerViewAttributesNVX: return "PerViewAttributesNVX";
      case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX: return "PerViewPositionXOnlyNVX";
      case SubpassDescriptionFlagBits::eFragmentRegionQCOM: return "FragmentRegionQCOM";
      case SubpassDescriptionFlagBits::eShaderResolveQCOM: return "ShaderResolveQCOM";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags
  {
    eTransient          = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
    eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
    eProtected          = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value )
  {
    switch ( value )
    {
      case CommandPoolCreateFlagBits::eTransient: return "Transient";
      case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer";
      case CommandPoolCreateFlagBits::eProtected: return "Protected";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags
  {
    eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
  {
    switch ( value )
    {
      case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class CommandBufferLevel
  {
    ePrimary   = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
    eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
  };

  VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value )
  {
    switch ( value )
    {
      case CommandBufferLevel::ePrimary: return "Primary";
      case CommandBufferLevel::eSecondary: return "Secondary";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags
  {
    eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
  {
    switch ( value )
    {
      case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags
  {
    eOneTimeSubmit      = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
    eSimultaneousUse    = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
  {
    switch ( value )
    {
      case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
      case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
      case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class QueryControlFlagBits : VkQueryControlFlags
  {
    ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
  {
    switch ( value )
    {
      case QueryControlFlagBits::ePrecise: return "Precise";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class IndexType
  {
    eUint16   = VK_INDEX_TYPE_UINT16,
    eUint32   = VK_INDEX_TYPE_UINT32,
    eNoneKHR  = VK_INDEX_TYPE_NONE_KHR,
    eUint8EXT = VK_INDEX_TYPE_UINT8_EXT,
    eNoneNV   = VK_INDEX_TYPE_NONE_NV
  };

  VULKAN_HPP_INLINE std::string to_string( IndexType value )
  {
    switch ( value )
    {
      case IndexType::eUint16: return "Uint16";
      case IndexType::eUint32: return "Uint32";
      case IndexType::eNoneKHR: return "NoneKHR";
      case IndexType::eUint8EXT: return "Uint8EXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class StencilFaceFlagBits : VkStencilFaceFlags
  {
    eFront                 = VK_STENCIL_FACE_FRONT_BIT,
    eBack                  = VK_STENCIL_FACE_BACK_BIT,
    eFrontAndBack          = VK_STENCIL_FACE_FRONT_AND_BACK,
    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
  };

  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
  {
    switch ( value )
    {
      case StencilFaceFlagBits::eFront: return "Front";
      case StencilFaceFlagBits::eBack: return "Back";
      case StencilFaceFlagBits::eFrontAndBack: return "FrontAndBack";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SubpassContents
  {
    eInline                  = VK_SUBPASS_CONTENTS_INLINE,
    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
  };

  VULKAN_HPP_INLINE std::string to_string( SubpassContents value )
  {
    switch ( value )
    {
      case SubpassContents::eInline: return "Inline";
      case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_VERSION_1_1 ===

  enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags
  {
    eBasic           = VK_SUBGROUP_FEATURE_BASIC_BIT,
    eVote            = VK_SUBGROUP_FEATURE_VOTE_BIT,
    eArithmetic      = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
    eBallot          = VK_SUBGROUP_FEATURE_BALLOT_BIT,
    eShuffle         = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
    eClustered       = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
    eQuad            = VK_SUBGROUP_FEATURE_QUAD_BIT,
    ePartitionedNV   = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
  {
    switch ( value )
    {
      case SubgroupFeatureFlagBits::eBasic: return "Basic";
      case SubgroupFeatureFlagBits::eVote: return "Vote";
      case SubgroupFeatureFlagBits::eArithmetic: return "Arithmetic";
      case SubgroupFeatureFlagBits::eBallot: return "Ballot";
      case SubgroupFeatureFlagBits::eShuffle: return "Shuffle";
      case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative";
      case SubgroupFeatureFlagBits::eClustered: return "Clustered";
      case SubgroupFeatureFlagBits::eQuad: return "Quad";
      case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags
  {
    eCopySrc    = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
    eCopyDst    = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
    eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
    eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT
  };
  using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits;

  VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
  {
    switch ( value )
    {
      case PeerMemoryFeatureFlagBits::eCopySrc: return "CopySrc";
      case PeerMemoryFeatureFlagBits::eCopyDst: return "CopyDst";
      case PeerMemoryFeatureFlagBits::eGenericSrc: return "GenericSrc";
      case PeerMemoryFeatureFlagBits::eGenericDst: return "GenericDst";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags
  {
    eDeviceMask                 = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
    eDeviceAddress              = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
    eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT
  };
  using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits;

  VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
  {
    switch ( value )
    {
      case MemoryAllocateFlagBits::eDeviceMask: return "DeviceMask";
      case MemoryAllocateFlagBits::eDeviceAddress: return "DeviceAddress";
      case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
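
  // Illustrative sketch (not generated): eDeviceAddress is the flag commonly set through
  // VkMemoryAllocateFlagsInfo when an allocation backs a buffer whose device address is queried;
  // capture/replay tooling additionally uses eDeviceAddressCaptureReplay.
  //
  //   vk::MemoryAllocateFlags allocFlags = vk::MemoryAllocateFlagBits::eDeviceAddress;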

  enum class PointClippingBehavior
  {
    eAllClipPlanes      = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
    eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY
  };
  using PointClippingBehaviorKHR = PointClippingBehavior;

  VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
  {
    switch ( value )
    {
      case PointClippingBehavior::eAllClipPlanes: return "AllClipPlanes";
      case PointClippingBehavior::eUserClipPlanesOnly: return "UserClipPlanesOnly";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class TessellationDomainOrigin
  {
    eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
    eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT
  };
  using TessellationDomainOriginKHR = TessellationDomainOrigin;

  VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value )
  {
    switch ( value )
    {
      case TessellationDomainOrigin::eUpperLeft: return "UpperLeft";
      case TessellationDomainOrigin::eLowerLeft: return "LowerLeft";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SamplerYcbcrModelConversion
  {
    eRgbIdentity   = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
    eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
    eYcbcr709      = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
    eYcbcr601      = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
    eYcbcr2020     = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
  };
  using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;

  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
  {
    switch ( value )
    {
      case SamplerYcbcrModelConversion::eRgbIdentity: return "RgbIdentity";
      case SamplerYcbcrModelConversion::eYcbcrIdentity: return "YcbcrIdentity";
      case SamplerYcbcrModelConversion::eYcbcr709: return "Ycbcr709";
      case SamplerYcbcrModelConversion::eYcbcr601: return "Ycbcr601";
      case SamplerYcbcrModelConversion::eYcbcr2020: return "Ycbcr2020";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SamplerYcbcrRange
  {
    eItuFull   = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
    eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
  };
  using SamplerYcbcrRangeKHR = SamplerYcbcrRange;

  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
  {
    switch ( value )
    {
      case SamplerYcbcrRange::eItuFull: return "ItuFull";
      case SamplerYcbcrRange::eItuNarrow: return "ItuNarrow";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ChromaLocation
  {
    eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
    eMidpoint    = VK_CHROMA_LOCATION_MIDPOINT
  };
  using ChromaLocationKHR = ChromaLocation;

  VULKAN_HPP_INLINE std::string to_string( ChromaLocation value )
  {
    switch ( value )
    {
      case ChromaLocation::eCositedEven: return "CositedEven";
      case ChromaLocation::eMidpoint: return "Midpoint";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DescriptorUpdateTemplateType
  {
    eDescriptorSet      = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
    ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR
  };
  using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType;

  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value )
  {
    switch ( value )
    {
      case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet";
      case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags
  {
    eOpaqueFd        = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
    eOpaqueWin32     = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
    eOpaqueWin32Kmt  = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
    eD3D11Texture    = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
    eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
    eD3D12Heap       = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
    eD3D12Resource   = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
    eDmaBufEXT       = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
    eHostAllocationEXT          = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
    eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT,
#if defined( VK_USE_PLATFORM_FUCHSIA )
    eZirconVmoFUCHSIA = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA
#endif /*VK_USE_PLATFORM_FUCHSIA*/
  };
  using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
  {
    switch ( value )
    {
      case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
      case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture";
      case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt";
      case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap";
      case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource";
      case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT";
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
      case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT";
      case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT";
#if defined( VK_USE_PLATFORM_FUCHSIA )
      case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA: return "ZirconVmoFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
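
  // Editorial note (not generated): the enumerators guarded by VK_USE_PLATFORM_ANDROID_KHR and
  // VK_USE_PLATFORM_FUCHSIA exist only when the corresponding platform macro is defined before
  // including this header, and their case labels above are compiled in under the same guards.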

  enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags
  {
    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
    eExportable    = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
    eImportable    = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
  };
  using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
  {
    switch ( value )
    {
      case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly";
      case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable";
      case ExternalMemoryFeatureFlagBits::eImportable: return "Importable";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags
  {
    eOpaqueFd       = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
    eOpaqueWin32    = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
    eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
    eSyncFd         = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
  };
  using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
  {
    switch ( value )
    {
      case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
      case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
  {
    eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
    eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
  };
  using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
  {
    switch ( value )
    {
      case ExternalFenceFeatureFlagBits::eExportable: return "Exportable";
      case ExternalFenceFeatureFlagBits::eImportable: return "Importable";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class FenceImportFlagBits : VkFenceImportFlags
  {
    eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
  };
  using FenceImportFlagBitsKHR = FenceImportFlagBits;

  VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
  {
    switch ( value )
    {
      case FenceImportFlagBits::eTemporary: return "Temporary";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
  {
    eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
  };
  using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;

  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
  {
    switch ( value )
    {
      case SemaphoreImportFlagBits::eTemporary: return "Temporary";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags
  {
    eOpaqueFd       = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
    eOpaqueWin32    = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
    eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
    eD3D12Fence     = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
    eSyncFd         = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
#if defined( VK_USE_PLATFORM_FUCHSIA )
    eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT
  };
  using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
  {
    switch ( value )
    {
      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
      case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence";
      case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd";
#if defined( VK_USE_PLATFORM_FUCHSIA )
      case ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA: return "ZirconEventFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags
  {
    eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
    eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
  };
  using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
  {
    switch ( value )
    {
      case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable";
      case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_VERSION_1_2 ===

  enum class DriverId
  {
    eAmdProprietary          = VK_DRIVER_ID_AMD_PROPRIETARY,
    eAmdOpenSource           = VK_DRIVER_ID_AMD_OPEN_SOURCE,
    eMesaRadv                = VK_DRIVER_ID_MESA_RADV,
    eNvidiaProprietary       = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
    eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
    eIntelOpenSourceMESA     = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
    eImaginationProprietary  = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
    eQualcommProprietary     = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
    eArmProprietary          = VK_DRIVER_ID_ARM_PROPRIETARY,
    eGoogleSwiftshader       = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
    eGgpProprietary          = VK_DRIVER_ID_GGP_PROPRIETARY,
    eBroadcomProprietary     = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
    eMesaLlvmpipe            = VK_DRIVER_ID_MESA_LLVMPIPE,
    eMoltenvk                = VK_DRIVER_ID_MOLTENVK,
    eCoreaviProprietary      = VK_DRIVER_ID_COREAVI_PROPRIETARY,
    eJuiceProprietary        = VK_DRIVER_ID_JUICE_PROPRIETARY
  };
  using DriverIdKHR = DriverId;

  VULKAN_HPP_INLINE std::string to_string( DriverId value )
  {
    switch ( value )
    {
      case DriverId::eAmdProprietary: return "AmdProprietary";
      case DriverId::eAmdOpenSource: return "AmdOpenSource";
      case DriverId::eMesaRadv: return "MesaRadv";
      case DriverId::eNvidiaProprietary: return "NvidiaProprietary";
      case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows";
      case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA";
      case DriverId::eImaginationProprietary: return "ImaginationProprietary";
      case DriverId::eQualcommProprietary: return "QualcommProprietary";
      case DriverId::eArmProprietary: return "ArmProprietary";
      case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader";
      case DriverId::eGgpProprietary: return "GgpProprietary";
      case DriverId::eBroadcomProprietary: return "BroadcomProprietary";
      case DriverId::eMesaLlvmpipe: return "MesaLlvmpipe";
      case DriverId::eMoltenvk: return "Moltenvk";
      case DriverId::eCoreaviProprietary: return "CoreaviProprietary";
      case DriverId::eJuiceProprietary: return "JuiceProprietary";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
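
  // Illustrative usage sketch, not part of the generated header: DriverId is reported in
  // VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties::driverID, and the to_string overload
  // above turns it into a readable name for logging, e.g. in caller code:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties props = ...;  // queried via getProperties2
  //   std::string driverName = VULKAN_HPP_NAMESPACE::to_string( props.driverID );  // e.g. "MesaRadv"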

  enum class ShaderFloatControlsIndependence
  {
    e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
    eAll       = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
    eNone      = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE
  };
  using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence;

  VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value )
  {
    switch ( value )
    {
      case ShaderFloatControlsIndependence::e32BitOnly: return "32BitOnly";
      case ShaderFloatControlsIndependence::eAll: return "All";
      case ShaderFloatControlsIndependence::eNone: return "None";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags
  {
    eUpdateAfterBind          = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
    eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
    ePartiallyBound           = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
    eVariableDescriptorCount  = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT
  };
  using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits;

  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value )
  {
    switch ( value )
    {
      case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
      case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending";
      case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound";
      case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
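
  // Illustrative usage sketch, not part of the generated header: individual bits are usually
  // OR-ed into a DescriptorBindingFlags mask (the Flags machinery appears later in this header),
  // e.g. for "bindless"-style descriptor arrays in caller code:
  //
  //   VULKAN_HPP_NAMESPACE::DescriptorBindingFlags bindingFlags =
  //     VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits::eUpdateAfterBind |
  //     VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits::ePartiallyBound |
  //     VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits::eVariableDescriptorCount;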

  enum class ResolveModeFlagBits : VkResolveModeFlags
  {
    eNone       = VK_RESOLVE_MODE_NONE,
    eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
    eAverage    = VK_RESOLVE_MODE_AVERAGE_BIT,
    eMin        = VK_RESOLVE_MODE_MIN_BIT,
    eMax        = VK_RESOLVE_MODE_MAX_BIT
  };
  using ResolveModeFlagBitsKHR = ResolveModeFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
  {
    switch ( value )
    {
      case ResolveModeFlagBits::eNone: return "None";
      case ResolveModeFlagBits::eSampleZero: return "SampleZero";
      case ResolveModeFlagBits::eAverage: return "Average";
      case ResolveModeFlagBits::eMin: return "Min";
      case ResolveModeFlagBits::eMax: return "Max";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
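
  // Illustrative usage sketch, not part of the generated header: resolve modes are chosen per
  // attachment, e.g. via SubpassDescriptionDepthStencilResolve in caller code:
  //
  //   resolveInfo.depthResolveMode   = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eSampleZero;
  //   resolveInfo.stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eSampleZero;
  //
  // eSampleZero is the one depth/stencil resolve mode every implementation is required to support.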

  enum class SamplerReductionMode
  {
    eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
    eMin             = VK_SAMPLER_REDUCTION_MODE_MIN,
    eMax             = VK_SAMPLER_REDUCTION_MODE_MAX
  };
  using SamplerReductionModeEXT = SamplerReductionMode;

  VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
  {
    switch ( value )
    {
      case SamplerReductionMode::eWeightedAverage: return "WeightedAverage";
      case SamplerReductionMode::eMin: return "Min";
      case SamplerReductionMode::eMax: return "Max";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SemaphoreType
  {
    eBinary   = VK_SEMAPHORE_TYPE_BINARY,
    eTimeline = VK_SEMAPHORE_TYPE_TIMELINE
  };
  using SemaphoreTypeKHR = SemaphoreType;

  VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
  {
    switch ( value )
    {
      case SemaphoreType::eBinary: return "Binary";
      case SemaphoreType::eTimeline: return "Timeline";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
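
  // Illustrative usage sketch, not part of the generated header: a timeline semaphore is created
  // by chaining SemaphoreTypeCreateInfo into SemaphoreCreateInfo in caller code:
  //
  //   VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo typeInfo( VULKAN_HPP_NAMESPACE::SemaphoreType::eTimeline, 0 );
  //   VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo     createInfo;
  //   createInfo.pNext = &typeInfo;
  //   // device.createSemaphore( createInfo ) then yields a timeline semaphore.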

  enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags
  {
    eAny = VK_SEMAPHORE_WAIT_ANY_BIT
  };
  using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits;

  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
  {
    switch ( value )
    {
      case SemaphoreWaitFlagBits::eAny: return "Any";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_KHR_surface ===

  enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
  {
    eIdentity                  = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
    eRotate90                  = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
    eRotate180                 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
    eRotate270                 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
    eHorizontalMirror          = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
    eHorizontalMirrorRotate90  = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
    eInherit                   = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
  {
    switch ( value )
    {
      case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
      case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
      case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
      case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
      case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
      case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
      case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PresentModeKHR
  {
    eImmediate               = VK_PRESENT_MODE_IMMEDIATE_KHR,
    eMailbox                 = VK_PRESENT_MODE_MAILBOX_KHR,
    eFifo                    = VK_PRESENT_MODE_FIFO_KHR,
    eFifoRelaxed             = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
    eSharedDemandRefresh     = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
  {
    switch ( value )
    {
      case PresentModeKHR::eImmediate: return "Immediate";
      case PresentModeKHR::eMailbox: return "Mailbox";
      case PresentModeKHR::eFifo: return "Fifo";
      case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
      case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh";
      case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
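
  // Illustrative usage sketch, not part of the generated header: eFifo is the only present mode
  // the specification guarantees to be supported, so callers commonly prefer eMailbox when it is
  // reported and fall back otherwise (choosePresentMode below is a hypothetical helper):
  //
  //   VULKAN_HPP_NAMESPACE::PresentModeKHR choosePresentMode(
  //     std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> const & available )
  //   {
  //     for ( auto mode : available )
  //       if ( mode == VULKAN_HPP_NAMESPACE::PresentModeKHR::eMailbox )
  //         return mode;
  //     return VULKAN_HPP_NAMESPACE::PresentModeKHR::eFifo;
  //   }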

  enum class ColorSpaceKHR
  {
    eSrgbNonlinear             = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
    eDisplayP3NonlinearEXT     = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
    eExtendedSrgbLinearEXT     = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
    eDisplayP3LinearEXT        = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
    eDciP3NonlinearEXT         = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
    eBt709LinearEXT            = VK_COLOR_SPACE_BT709_LINEAR_EXT,
    eBt709NonlinearEXT         = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
    eBt2020LinearEXT           = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
    eHdr10St2084EXT            = VK_COLOR_SPACE_HDR10_ST2084_EXT,
    eDolbyvisionEXT            = VK_COLOR_SPACE_DOLBYVISION_EXT,
    eHdr10HlgEXT               = VK_COLOR_SPACE_HDR10_HLG_EXT,
    eAdobergbLinearEXT         = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
    eAdobergbNonlinearEXT      = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
    ePassThroughEXT            = VK_COLOR_SPACE_PASS_THROUGH_EXT,
    eExtendedSrgbNonlinearEXT  = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
    eDisplayNativeAMD          = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD,
    eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
    eDciP3LinearEXT            = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value )
  {
    switch ( value )
    {
      case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
      case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
      case ColorSpaceKHR::eExtendedSrgbLinearEXT: return "ExtendedSrgbLinearEXT";
      case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT";
      case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
      case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
      case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
      case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
      case ColorSpaceKHR::eHdr10St2084EXT: return "Hdr10St2084EXT";
      case ColorSpaceKHR::eDolbyvisionEXT: return "DolbyvisionEXT";
      case ColorSpaceKHR::eHdr10HlgEXT: return "Hdr10HlgEXT";
      case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT";
      case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT";
      case ColorSpaceKHR::ePassThroughEXT: return "PassThroughEXT";
      case ColorSpaceKHR::eExtendedSrgbNonlinearEXT: return "ExtendedSrgbNonlinearEXT";
      case ColorSpaceKHR::eDisplayNativeAMD: return "DisplayNativeAMD";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR
  {
    eOpaque         = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
    ePreMultiplied  = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
    eInherit        = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
  {
    switch ( value )
    {
      case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
      case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
      case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
      case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_KHR_swapchain ===

  enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR
  {
    eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
    eProtected                = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
    eMutableFormat            = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
  {
    switch ( value )
    {
      case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions";
      case SwapchainCreateFlagBitsKHR::eProtected: return "Protected";
      case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR
  {
    eLocal            = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
    eRemote           = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
    eSum              = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
  {
    switch ( value )
    {
      case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local";
      case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote";
      case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum";
      case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_KHR_display ===

  enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
  {
    eOpaque                = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
    eGlobal                = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
    ePerPixel              = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
  {
    switch ( value )
    {
      case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
      case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
      case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
      case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_debug_report ===

  enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT
  {
    eInformation        = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
    eWarning            = VK_DEBUG_REPORT_WARNING_BIT_EXT,
    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
    eError              = VK_DEBUG_REPORT_ERROR_BIT_EXT,
    eDebug              = VK_DEBUG_REPORT_DEBUG_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
  {
    switch ( value )
    {
      case DebugReportFlagBitsEXT::eInformation: return "Information";
      case DebugReportFlagBitsEXT::eWarning: return "Warning";
      case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
      case DebugReportFlagBitsEXT::eError: return "Error";
      case DebugReportFlagBitsEXT::eDebug: return "Debug";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DebugReportObjectTypeEXT
  {
    eUnknown                     = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
    eInstance                    = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
    ePhysicalDevice              = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
    eDevice                      = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
    eQueue                       = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
    eSemaphore                   = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
    eCommandBuffer               = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
    eFence                       = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
    eDeviceMemory                = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
    eBuffer                      = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
    eImage                       = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
    eEvent                       = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
    eQueryPool                   = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
    eBufferView                  = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
    eImageView                   = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
    eShaderModule                = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
    ePipelineCache               = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
    ePipelineLayout              = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
    eRenderPass                  = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
    ePipeline                    = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
    eDescriptorSetLayout         = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
    eSampler                     = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
    eDescriptorPool              = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
    eDescriptorSet               = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
    eFramebuffer                 = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
    eCommandPool                 = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
    eSurfaceKHR                  = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
    eSwapchainKHR                = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
    eDebugReportCallbackEXT      = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
    eDisplayKHR                  = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
    eDisplayModeKHR              = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
    eValidationCacheEXT          = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
    eSamplerYcbcrConversion      = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
    eDescriptorUpdateTemplate    = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
    eCuModuleNVX                 = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT,
    eCuFunctionNVX               = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT,
    eAccelerationStructureKHR    = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
    eAccelerationStructureNV     = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
    eDebugReport                 = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT,
    eSamplerYcbcrConversionKHR   = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
    eValidationCache             = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
  {
    switch ( value )
    {
      case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
      case DebugReportObjectTypeEXT::eInstance: return "Instance";
      case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
      case DebugReportObjectTypeEXT::eDevice: return "Device";
      case DebugReportObjectTypeEXT::eQueue: return "Queue";
      case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
      case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
      case DebugReportObjectTypeEXT::eFence: return "Fence";
      case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
      case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
      case DebugReportObjectTypeEXT::eImage: return "Image";
      case DebugReportObjectTypeEXT::eEvent: return "Event";
      case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
      case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
      case DebugReportObjectTypeEXT::eImageView: return "ImageView";
      case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
      case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
      case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
      case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
      case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
      case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
      case DebugReportObjectTypeEXT::eSampler: return "Sampler";
      case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
      case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
      case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
      case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
      case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR";
      case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR";
      case DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
      case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR";
      case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR";
      case DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT";
      case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
      case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
      case DebugReportObjectTypeEXT::eCuModuleNVX: return "CuModuleNVX";
      case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX";
      case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR";
      case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_AMD_rasterization_order ===

  enum class RasterizationOrderAMD
  {
    eStrict  = VK_RASTERIZATION_ORDER_STRICT_AMD,
    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
  };

  VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
  {
    switch ( value )
    {
      case RasterizationOrderAMD::eStrict: return "Strict";
      case RasterizationOrderAMD::eRelaxed: return "Relaxed";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_queue ===

  enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR
  {
    eInvalid = VK_VIDEO_CODEC_OPERATION_INVALID_BIT_KHR,
#  if defined( VK_ENABLE_BETA_EXTENSIONS )
    eEncodeH264EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT,
    eDecodeH264EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT,
    eDecodeH265EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/
  };

  VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoCodecOperationFlagBitsKHR::eInvalid: return "Invalid";
#  if defined( VK_ENABLE_BETA_EXTENSIONS )
      case VideoCodecOperationFlagBitsKHR::eEncodeH264EXT: return "EncodeH264EXT";
      case VideoCodecOperationFlagBitsKHR::eDecodeH264EXT: return "DecodeH264EXT";
      case VideoCodecOperationFlagBitsKHR::eDecodeH265EXT: return "DecodeH265EXT";
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR
  {
    eInvalid    = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_BIT_KHR,
    eMonochrome = VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR,
    e420        = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR,
    e422        = VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR,
    e444        = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoChromaSubsamplingFlagBitsKHR::eInvalid: return "Invalid";
      case VideoChromaSubsamplingFlagBitsKHR::eMonochrome: return "Monochrome";
      case VideoChromaSubsamplingFlagBitsKHR::e420: return "420";
      case VideoChromaSubsamplingFlagBitsKHR::e422: return "422";
      case VideoChromaSubsamplingFlagBitsKHR::e444: return "444";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR
  {
    eInvalid = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR,
    e8       = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR,
    e10      = VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR,
    e12      = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoComponentBitDepthFlagBitsKHR::eInvalid: return "Invalid";
      case VideoComponentBitDepthFlagBitsKHR::e8: return "8";
      case VideoComponentBitDepthFlagBitsKHR::e10: return "10";
      case VideoComponentBitDepthFlagBitsKHR::e12: return "12";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoCapabilitiesFlagBitsKHR : VkVideoCapabilitiesFlagsKHR
  {
    eProtectedContent        = VK_VIDEO_CAPABILITIES_PROTECTED_CONTENT_BIT_KHR,
    eSeparateReferenceImages = VK_VIDEO_CAPABILITIES_SEPARATE_REFERENCE_IMAGES_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoCapabilitiesFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoCapabilitiesFlagBitsKHR::eProtectedContent: return "ProtectedContent";
      case VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages: return "SeparateReferenceImages";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR
  {
    eDefault          = VK_VIDEO_SESSION_CREATE_DEFAULT_KHR,
    eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoSessionCreateFlagBitsKHR::eDefault: return "Default";
      case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR
  {
    eDefault = VK_VIDEO_CODING_CONTROL_DEFAULT_KHR,
    eReset   = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoCodingControlFlagBitsKHR::eDefault: return "Default";
      case VideoCodingControlFlagBitsKHR::eReset: return "Reset";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoCodingQualityPresetFlagBitsKHR : VkVideoCodingQualityPresetFlagsKHR
  {
    eDefault = VK_VIDEO_CODING_QUALITY_PRESET_DEFAULT_BIT_KHR,
    eNormal  = VK_VIDEO_CODING_QUALITY_PRESET_NORMAL_BIT_KHR,
    ePower   = VK_VIDEO_CODING_QUALITY_PRESET_POWER_BIT_KHR,
    eQuality = VK_VIDEO_CODING_QUALITY_PRESET_QUALITY_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoCodingQualityPresetFlagBitsKHR::eDefault: return "Default";
      case VideoCodingQualityPresetFlagBitsKHR::eNormal: return "Normal";
      case VideoCodingQualityPresetFlagBitsKHR::ePower: return "Power";
      case VideoCodingQualityPresetFlagBitsKHR::eQuality: return "Quality";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class QueryResultStatusKHR
  {
    eError    = VK_QUERY_RESULT_STATUS_ERROR_KHR,
    eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR,
    eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( QueryResultStatusKHR value )
  {
    switch ( value )
    {
      case QueryResultStatusKHR::eError: return "Error";
      case QueryResultStatusKHR::eNotReady: return "NotReady";
      case QueryResultStatusKHR::eComplete: return "Complete";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_decode_queue ===

  enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR
  {
    eDefault   = VK_VIDEO_DECODE_DEFAULT_KHR,
    eReserved0 = VK_VIDEO_DECODE_RESERVED_0_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoDecodeFlagBitsKHR::eDefault: return "Default";
      case VideoDecodeFlagBitsKHR::eReserved0: return "Reserved0";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_EXT_video_encode_h264 ===

  enum class VideoEncodeH264CapabilitiesFlagBitsEXT : VkVideoEncodeH264CapabilitiesFlagsEXT
  {
    eVkVideoEncodeH264CapabilityCabac = VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT,
    eVkVideoEncodeH264CapabilityCavlc = VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT,
    eVkVideoEncodeH264CapabilityWeightedBiPredImplicit =
      VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BI_PRED_IMPLICIT_BIT_EXT,
    eVkVideoEncodeH264CapabilityTransform8X8         = VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT,
    eVkVideoEncodeH264CapabilityChromaQpOffset       = VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT,
    eVkVideoEncodeH264CapabilitySecondChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT,
    eVkVideoEncodeH264CapabilityDeblockingFilterDisabled =
      VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT,
    eVkVideoEncodeH264CapabilityDeblockingFilterEnabled =
      VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT,
    eVkVideoEncodeH264CapabilityDeblockingFilterPartial =
      VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT,
    eVkVideoEncodeH264CapabilityMultipleSlicePerFrame =
      VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT,
    eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize =
      VK_VIDEO_ENCODE_H264_CAPABILITY_EVENLY_DISTRIBUTED_SLICE_SIZE_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilitiesFlagBitsEXT value )
  {
    switch ( value )
    {
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac:
        return "VkVideoEncodeH264CapabilityCabac";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc:
        return "VkVideoEncodeH264CapabilityCavlc";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityWeightedBiPredImplicit:
        return "VkVideoEncodeH264CapabilityWeightedBiPredImplicit";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityTransform8X8:
        return "VkVideoEncodeH264CapabilityTransform8X8";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityChromaQpOffset:
        return "VkVideoEncodeH264CapabilityChromaQpOffset";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilitySecondChromaQpOffset:
        return "VkVideoEncodeH264CapabilitySecondChromaQpOffset";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterDisabled:
        return "VkVideoEncodeH264CapabilityDeblockingFilterDisabled";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterEnabled:
        return "VkVideoEncodeH264CapabilityDeblockingFilterEnabled";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterPartial:
        return "VkVideoEncodeH264CapabilityDeblockingFilterPartial";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityMultipleSlicePerFrame:
        return "VkVideoEncodeH264CapabilityMultipleSlicePerFrame";
      case VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize:
        return "VkVideoEncodeH264CapabilityEvenlyDistributedSliceSize";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoEncodeH264InputModeFlagBitsEXT : VkVideoEncodeH264InputModeFlagsEXT
  {
    eFrame  = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT,
    eSlice  = VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT,
    eNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagBitsEXT value )
  {
    switch ( value )
    {
      case VideoEncodeH264InputModeFlagBitsEXT::eFrame: return "Frame";
      case VideoEncodeH264InputModeFlagBitsEXT::eSlice: return "Slice";
      case VideoEncodeH264InputModeFlagBitsEXT::eNonVcl: return "NonVcl";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoEncodeH264OutputModeFlagBitsEXT : VkVideoEncodeH264OutputModeFlagsEXT
  {
    eFrame  = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT,
    eSlice  = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT,
    eNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagBitsEXT value )
  {
    switch ( value )
    {
      case VideoEncodeH264OutputModeFlagBitsEXT::eFrame: return "Frame";
      case VideoEncodeH264OutputModeFlagBitsEXT::eSlice: return "Slice";
      case VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl: return "NonVcl";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoEncodeH264CreateFlagBitsEXT : VkVideoEncodeH264CreateFlagsEXT
  {
    eDefault   = VK_VIDEO_ENCODE_H264_CREATE_DEFAULT_EXT,
    eReserved0 = VK_VIDEO_ENCODE_H264_CREATE_RESERVED_0_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagBitsEXT value )
  {
    switch ( value )
    {
      case VideoEncodeH264CreateFlagBitsEXT::eDefault: return "Default";
      case VideoEncodeH264CreateFlagBitsEXT::eReserved0: return "Reserved0";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_EXT_video_decode_h264 ===

  enum class VideoDecodeH264FieldLayoutFlagBitsEXT : VkVideoDecodeH264FieldLayoutFlagsEXT
  {
    eVkVideoDecodeH264ProgressivePicturesOnly = VK_VIDEO_DECODE_H264_PROGRESSIVE_PICTURES_ONLY_EXT,
    eLineInterlacedPlane                      = VK_VIDEO_DECODE_H264_FIELD_LAYOUT_LINE_INTERLACED_PLANE_BIT_EXT,
    eSeparateInterlacedPlane                  = VK_VIDEO_DECODE_H264_FIELD_LAYOUT_SEPARATE_INTERLACED_PLANE_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264FieldLayoutFlagBitsEXT value )
  {
    switch ( value )
    {
      case VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264ProgressivePicturesOnly:
        return "VkVideoDecodeH264ProgressivePicturesOnly";
      case VideoDecodeH264FieldLayoutFlagBitsEXT::eLineInterlacedPlane: return "LineInterlacedPlane";
      case VideoDecodeH264FieldLayoutFlagBitsEXT::eSeparateInterlacedPlane: return "SeparateInterlacedPlane";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_AMD_shader_info ===

  enum class ShaderInfoTypeAMD
  {
    eStatistics  = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
    eBinary      = VK_SHADER_INFO_TYPE_BINARY_AMD,
    eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
  };

  VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
  {
    switch ( value )
    {
      case ShaderInfoTypeAMD::eStatistics: return "Statistics";
      case ShaderInfoTypeAMD::eBinary: return "Binary";
      case ShaderInfoTypeAMD::eDisassembly: return "Disassembly";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_NV_external_memory_capabilities ===

  enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV
  {
    eOpaqueWin32    = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
    eD3D11Image     = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
    eD3D11ImageKmt  = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
  {
    switch ( value )
    {
      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
  {
    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
    eExportable    = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
    eImportable    = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
  {
    switch ( value )
    {
      case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
      case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
      case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_validation_flags ===

  enum class ValidationCheckEXT
  {
    eAll     = VK_VALIDATION_CHECK_ALL_EXT,
    eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value )
  {
    switch ( value )
    {
      case ValidationCheckEXT::eAll: return "All";
      case ValidationCheckEXT::eShaders: return "Shaders";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_conditional_rendering ===

  enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT
  {
    eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
  {
    switch ( value )
    {
      case ConditionalRenderingFlagBitsEXT::eInverted: return "Inverted";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_display_surface_counter ===

  enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT
  {
    eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
  {
    switch ( value )
    {
      case SurfaceCounterFlagBitsEXT::eVblank: return "Vblank";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_display_control ===

  enum class DisplayPowerStateEXT
  {
    eOff     = VK_DISPLAY_POWER_STATE_OFF_EXT,
    eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
    eOn      = VK_DISPLAY_POWER_STATE_ON_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value )
  {
    switch ( value )
    {
      case DisplayPowerStateEXT::eOff: return "Off";
      case DisplayPowerStateEXT::eSuspend: return "Suspend";
      case DisplayPowerStateEXT::eOn: return "On";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DeviceEventTypeEXT
  {
    eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value )
  {
    switch ( value )
    {
      case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DisplayEventTypeEXT
  {
    eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value )
  {
    switch ( value )
    {
      case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_NV_viewport_swizzle ===

  enum class ViewportCoordinateSwizzleNV
  {
    ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
    eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV,
    ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
    eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV,
    ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV,
    eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV,
    ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV,
    eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value )
  {
    switch ( value )
    {
      case ViewportCoordinateSwizzleNV::ePositiveX: return "PositiveX";
      case ViewportCoordinateSwizzleNV::eNegativeX: return "NegativeX";
      case ViewportCoordinateSwizzleNV::ePositiveY: return "PositiveY";
      case ViewportCoordinateSwizzleNV::eNegativeY: return "NegativeY";
      case ViewportCoordinateSwizzleNV::ePositiveZ: return "PositiveZ";
      case ViewportCoordinateSwizzleNV::eNegativeZ: return "NegativeZ";
      case ViewportCoordinateSwizzleNV::ePositiveW: return "PositiveW";
      case ViewportCoordinateSwizzleNV::eNegativeW: return "NegativeW";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_discard_rectangles ===

  enum class DiscardRectangleModeEXT
  {
    eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
    eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value )
  {
    switch ( value )
    {
      case DiscardRectangleModeEXT::eInclusive: return "Inclusive";
      case DiscardRectangleModeEXT::eExclusive: return "Exclusive";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_conservative_rasterization ===

  enum class ConservativeRasterizationModeEXT
  {
    eDisabled      = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
    eOverestimate  = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
    eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value )
  {
    switch ( value )
    {
      case ConservativeRasterizationModeEXT::eDisabled: return "Disabled";
      case ConservativeRasterizationModeEXT::eOverestimate: return "Overestimate";
      case ConservativeRasterizationModeEXT::eUnderestimate: return "Underestimate";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_KHR_performance_query ===

  enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR
  {
    ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR,
    eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
  {
    switch ( value )
    {
      case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting: return "PerformanceImpacting";
      case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted: return "ConcurrentlyImpacted";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PerformanceCounterScopeKHR
  {
    eCommandBuffer             = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
    eRenderPass                = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
    eCommand                   = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
    eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
    eVkQueryScopeCommand       = VK_QUERY_SCOPE_COMMAND_KHR,
    eVkQueryScopeRenderPass    = VK_QUERY_SCOPE_RENDER_PASS_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
  {
    switch ( value )
    {
      case PerformanceCounterScopeKHR::eCommandBuffer: return "CommandBuffer";
      case PerformanceCounterScopeKHR::eRenderPass: return "RenderPass";
      case PerformanceCounterScopeKHR::eCommand: return "Command";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PerformanceCounterStorageKHR
  {
    eInt32   = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
    eInt64   = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR,
    eUint32  = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR,
    eUint64  = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR,
    eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR,
    eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value )
  {
    switch ( value )
    {
      case PerformanceCounterStorageKHR::eInt32: return "Int32";
      case PerformanceCounterStorageKHR::eInt64: return "Int64";
      case PerformanceCounterStorageKHR::eUint32: return "Uint32";
      case PerformanceCounterStorageKHR::eUint64: return "Uint64";
      case PerformanceCounterStorageKHR::eFloat32: return "Float32";
      case PerformanceCounterStorageKHR::eFloat64: return "Float64";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PerformanceCounterUnitKHR
  {
    eGeneric        = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
    ePercentage     = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR,
    eNanoseconds    = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR,
    eBytes          = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR,
    eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR,
    eKelvin         = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR,
    eWatts          = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR,
    eVolts          = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR,
    eAmps           = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR,
    eHertz          = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR,
    eCycles         = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value )
  {
    switch ( value )
    {
      case PerformanceCounterUnitKHR::eGeneric: return "Generic";
      case PerformanceCounterUnitKHR::ePercentage: return "Percentage";
      case PerformanceCounterUnitKHR::eNanoseconds: return "Nanoseconds";
      case PerformanceCounterUnitKHR::eBytes: return "Bytes";
      case PerformanceCounterUnitKHR::eBytesPerSecond: return "BytesPerSecond";
      case PerformanceCounterUnitKHR::eKelvin: return "Kelvin";
      case PerformanceCounterUnitKHR::eWatts: return "Watts";
      case PerformanceCounterUnitKHR::eVolts: return "Volts";
      case PerformanceCounterUnitKHR::eAmps: return "Amps";
      case PerformanceCounterUnitKHR::eHertz: return "Hertz";
      case PerformanceCounterUnitKHR::eCycles: return "Cycles";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR
  {
  };

  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
  {
    return "(void)";
  }

  //=== VK_EXT_debug_utils ===

  enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT
  {
    eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
    eInfo    = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
    eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
    eError   = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
  {
    switch ( value )
    {
      case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose";
      case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info";
      case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning";
      case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
  {
    eGeneral     = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
    eValidation  = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
  {
    switch ( value )
    {
      case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General";
      case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation";
      case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
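
  // Illustrative usage sketch, not part of the generated header: severity and type bits are
  // typically OR-ed into the masks of a DebugUtilsMessengerCreateInfoEXT in caller code:
  //
  //   createInfo.messageSeverity = VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
  //                                VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT::eError;
  //   createInfo.messageType     = VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
  //                                VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT::eValidation |
  //                                VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT::ePerformance;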

  //=== VK_EXT_blend_operation_advanced ===

  enum class BlendOverlapEXT
  {
    eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
    eDisjoint     = VK_BLEND_OVERLAP_DISJOINT_EXT,
    eConjoint     = VK_BLEND_OVERLAP_CONJOINT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value )
  {
    switch ( value )
    {
      case BlendOverlapEXT::eUncorrelated: return "Uncorrelated";
      case BlendOverlapEXT::eDisjoint: return "Disjoint";
      case BlendOverlapEXT::eConjoint: return "Conjoint";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_KHR_acceleration_structure ===

  enum class AccelerationStructureTypeKHR
  {
    eTopLevel    = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
    eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR,
    eGeneric     = VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR
  };
  using AccelerationStructureTypeNV = AccelerationStructureTypeKHR;

  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value )
  {
    switch ( value )
    {
      case AccelerationStructureTypeKHR::eTopLevel: return "TopLevel";
      case AccelerationStructureTypeKHR::eBottomLevel: return "BottomLevel";
      case AccelerationStructureTypeKHR::eGeneric: return "Generic";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class AccelerationStructureBuildTypeKHR
  {
    eHost         = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR,
    eDevice       = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR,
    eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value )
  {
    switch ( value )
    {
      case AccelerationStructureBuildTypeKHR::eHost: return "Host";
      case AccelerationStructureBuildTypeKHR::eDevice: return "Device";
      case AccelerationStructureBuildTypeKHR::eHostOrDevice: return "HostOrDevice";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR
  {
    eOpaque                      = VK_GEOMETRY_OPAQUE_BIT_KHR,
    eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR
  };
  using GeometryFlagBitsNV = GeometryFlagBitsKHR;

  VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value )
  {
    switch ( value )
    {
      case GeometryFlagBitsKHR::eOpaque: return "Opaque";
      case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation: return "NoDuplicateAnyHitInvocation";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR
  {
    eTriangleFacingCullDisable     = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
    eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
    eForceOpaque                   = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
    eForceNoOpaque                 = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
    eTriangleCullDisable           = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV
  };
  using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;

  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )
  {
    switch ( value )
    {
      case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable: return "TriangleFacingCullDisable";
      case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise: return "TriangleFrontCounterclockwise";
      case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque";
      case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR
  {
    eAllowUpdate     = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
    eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
    ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
    ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
    eLowMemory       = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR
  };
  using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;

  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value )
  {
    switch ( value )
    {
      case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate: return "AllowUpdate";
      case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction: return "AllowCompaction";
      case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace: return "PreferFastTrace";
      case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild";
      case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
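
  // Illustrative usage sketch, not part of the generated header: ePreferFastTrace and
  // ePreferFastBuild are mutually exclusive hints. A static scene typically sets something like
  //
  //   buildInfo.flags = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace |
  //                     VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR::eAllowCompaction;
  //
  // in AccelerationStructureBuildGeometryInfoKHR, while frequently rebuilt geometry tends toward
  // ePreferFastBuild (optionally combined with eAllowUpdate).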

  enum class CopyAccelerationStructureModeKHR
  {
    eClone       = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
    eCompact     = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
    eSerialize   = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR,
    eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR
  };
  using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;

  VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value )
  {
    switch ( value )
    {
      case CopyAccelerationStructureModeKHR::eClone: return "Clone";
      case CopyAccelerationStructureModeKHR::eCompact: return "Compact";
      case CopyAccelerationStructureModeKHR::eSerialize: return "Serialize";
      case CopyAccelerationStructureModeKHR::eDeserialize: return "Deserialize";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class GeometryTypeKHR
  {
    eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
    eAabbs     = VK_GEOMETRY_TYPE_AABBS_KHR,
    eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR
  };
  using GeometryTypeNV = GeometryTypeKHR;

  VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value )
  {
    switch ( value )
    {
      case GeometryTypeKHR::eTriangles: return "Triangles";
      case GeometryTypeKHR::eAabbs: return "Aabbs";
      case GeometryTypeKHR::eInstances: return "Instances";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class AccelerationStructureCompatibilityKHR
  {
    eCompatible   = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR,
    eIncompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCompatibilityKHR value )
  {
    switch ( value )
    {
      case AccelerationStructureCompatibilityKHR::eCompatible: return "Compatible";
      case AccelerationStructureCompatibilityKHR::eIncompatible: return "Incompatible";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR
  {
    eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagBitsKHR value )
  {
    switch ( value )
    {
      case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class BuildAccelerationStructureModeKHR
  {
    eBuild  = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR,
    eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureModeKHR value )
  {
    switch ( value )
    {
      case BuildAccelerationStructureModeKHR::eBuild: return "Build";
      case BuildAccelerationStructureModeKHR::eUpdate: return "Update";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_NV_framebuffer_mixed_samples ===

  enum class CoverageModulationModeNV
  {
    eNone  = VK_COVERAGE_MODULATION_MODE_NONE_NV,
    eRgb   = VK_COVERAGE_MODULATION_MODE_RGB_NV,
    eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
    eRgba  = VK_COVERAGE_MODULATION_MODE_RGBA_NV
  };

  VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value )
  {
    switch ( value )
    {
      case CoverageModulationModeNV::eNone: return "None";
      case CoverageModulationModeNV::eRgb: return "Rgb";
      case CoverageModulationModeNV::eAlpha: return "Alpha";
      case CoverageModulationModeNV::eRgba: return "Rgba";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_validation_cache ===

  enum class ValidationCacheHeaderVersionEXT
  {
    eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value )
  {
    switch ( value )
    {
      case ValidationCacheHeaderVersionEXT::eOne: return "One";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_NV_shading_rate_image ===

  enum class ShadingRatePaletteEntryNV
  {
    eNoInvocations           = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
    e16InvocationsPerPixel   = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV,
    e8InvocationsPerPixel    = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV,
    e4InvocationsPerPixel    = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV,
    e2InvocationsPerPixel    = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV,
    e1InvocationPerPixel     = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV,
    e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV,
    e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
    e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV,
    e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV,
    e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV,
    e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value )
  {
    switch ( value )
    {
      case ShadingRatePaletteEntryNV::eNoInvocations: return "NoInvocations";
      case ShadingRatePaletteEntryNV::e16InvocationsPerPixel: return "16InvocationsPerPixel";
      case ShadingRatePaletteEntryNV::e8InvocationsPerPixel: return "8InvocationsPerPixel";
      case ShadingRatePaletteEntryNV::e4InvocationsPerPixel: return "4InvocationsPerPixel";
      case ShadingRatePaletteEntryNV::e2InvocationsPerPixel: return "2InvocationsPerPixel";
      case ShadingRatePaletteEntryNV::e1InvocationPerPixel: return "1InvocationPerPixel";
      case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels";
      case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels";
      case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels";
      case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels";
      case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels";
      case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class CoarseSampleOrderTypeNV
  {
    eDefault     = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
    eCustom      = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV,
    ePixelMajor  = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV,
    eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV
  };

  VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value )
  {
    switch ( value )
    {
      case CoarseSampleOrderTypeNV::eDefault: return "Default";
      case CoarseSampleOrderTypeNV::eCustom: return "Custom";
      case CoarseSampleOrderTypeNV::ePixelMajor: return "PixelMajor";
      case CoarseSampleOrderTypeNV::eSampleMajor: return "SampleMajor";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_NV_ray_tracing ===

  enum class AccelerationStructureMemoryRequirementsTypeNV
  {
    eObject        = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
    eBuildScratch  = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV,
    eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV
  };

  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value )
  {
    switch ( value )
    {
      case AccelerationStructureMemoryRequirementsTypeNV::eObject: return "Object";
      case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch: return "BuildScratch";
      case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch: return "UpdateScratch";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_global_priority ===

  enum class QueueGlobalPriorityEXT
  {
    eLow      = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
    eMedium   = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
    eHigh     = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityEXT value )
  {
    switch ( value )
    {
      case QueueGlobalPriorityEXT::eLow: return "Low";
      case QueueGlobalPriorityEXT::eMedium: return "Medium";
      case QueueGlobalPriorityEXT::eHigh: return "High";
      case QueueGlobalPriorityEXT::eRealtime: return "Realtime";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_AMD_pipeline_compiler_control ===

  enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
  {
    return "(void)";
  }

  //=== VK_EXT_calibrated_timestamps ===

  enum class TimeDomainEXT
  {
    eDevice                  = VK_TIME_DOMAIN_DEVICE_EXT,
    eClockMonotonic          = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
    eClockMonotonicRaw       = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT,
    eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value )
  {
    switch ( value )
    {
      case TimeDomainEXT::eDevice: return "Device";
      case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic";
      case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw";
      case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_AMD_memory_overallocation_behavior ===

  enum class MemoryOverallocationBehaviorAMD
  {
    eDefault    = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
    eAllowed    = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD,
    eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD
  };

  VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value )
  {
    switch ( value )
    {
      case MemoryOverallocationBehaviorAMD::eDefault: return "Default";
      case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed";
      case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_pipeline_creation_feedback ===

  enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT
  {
    eValid                       = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT,
    eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT,
    eBasePipelineAcceleration    = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value )
  {
    switch ( value )
    {
      case PipelineCreationFeedbackFlagBitsEXT::eValid: return "Valid";
      case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit: return "ApplicationPipelineCacheHit";
      case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration: return "BasePipelineAcceleration";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_INTEL_performance_query ===

  enum class PerformanceConfigurationTypeINTEL
  {
    eCommandQueueMetricsDiscoveryActivated =
      VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
  };

  VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value )
  {
    switch ( value )
    {
      case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated:
        return "CommandQueueMetricsDiscoveryActivated";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class QueryPoolSamplingModeINTEL
  {
    eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL
  };

  VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value )
  {
    switch ( value )
    {
      case QueryPoolSamplingModeINTEL::eManual: return "Manual";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PerformanceOverrideTypeINTEL
  {
    eNullHardware   = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
    eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL
  };

  VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value )
  {
    switch ( value )
    {
      case PerformanceOverrideTypeINTEL::eNullHardware: return "NullHardware";
      case PerformanceOverrideTypeINTEL::eFlushGpuCaches: return "FlushGpuCaches";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PerformanceParameterTypeINTEL
  {
    eHwCountersSupported   = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
    eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL
  };

  VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value )
  {
    switch ( value )
    {
      case PerformanceParameterTypeINTEL::eHwCountersSupported: return "HwCountersSupported";
      case PerformanceParameterTypeINTEL::eStreamMarkerValidBits: return "StreamMarkerValidBits";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class PerformanceValueTypeINTEL
  {
    eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
    eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL,
    eFloat  = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL,
    eBool   = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL,
    eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL
  };

  VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value )
  {
    switch ( value )
    {
      case PerformanceValueTypeINTEL::eUint32: return "Uint32";
      case PerformanceValueTypeINTEL::eUint64: return "Uint64";
      case PerformanceValueTypeINTEL::eFloat: return "Float";
      case PerformanceValueTypeINTEL::eBool: return "Bool";
      case PerformanceValueTypeINTEL::eString: return "String";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_KHR_fragment_shading_rate ===

  enum class FragmentShadingRateCombinerOpKHR
  {
    eKeep    = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
    eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR,
    eMin     = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR,
    eMax     = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR,
    eMul     = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value )
  {
    switch ( value )
    {
      case FragmentShadingRateCombinerOpKHR::eKeep: return "Keep";
      case FragmentShadingRateCombinerOpKHR::eReplace: return "Replace";
      case FragmentShadingRateCombinerOpKHR::eMin: return "Min";
      case FragmentShadingRateCombinerOpKHR::eMax: return "Max";
      case FragmentShadingRateCombinerOpKHR::eMul: return "Mul";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_AMD_shader_core_properties2 ===

  enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
  {
  };

  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
  {
    return "(void)";
  }

  //=== VK_EXT_tooling_info ===

  enum class ToolPurposeFlagBitsEXT : VkToolPurposeFlagsEXT
  {
    eValidation         = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT,
    eProfiling          = VK_TOOL_PURPOSE_PROFILING_BIT_EXT,
    eTracing            = VK_TOOL_PURPOSE_TRACING_BIT_EXT,
    eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT,
    eModifyingFeatures  = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT,
    eDebugReporting     = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT,
    eDebugMarkers       = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBitsEXT value )
  {
    switch ( value )
    {
      case ToolPurposeFlagBitsEXT::eValidation: return "Validation";
      case ToolPurposeFlagBitsEXT::eProfiling: return "Profiling";
      case ToolPurposeFlagBitsEXT::eTracing: return "Tracing";
      case ToolPurposeFlagBitsEXT::eAdditionalFeatures: return "AdditionalFeatures";
      case ToolPurposeFlagBitsEXT::eModifyingFeatures: return "ModifyingFeatures";
      case ToolPurposeFlagBitsEXT::eDebugReporting: return "DebugReporting";
      case ToolPurposeFlagBitsEXT::eDebugMarkers: return "DebugMarkers";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_validation_features ===

  enum class ValidationFeatureEnableEXT
  {
    eGpuAssisted                   = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
    eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
    eBestPractices                 = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
    eDebugPrintf                   = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
    eSynchronizationValidation     = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value )
  {
    switch ( value )
    {
      case ValidationFeatureEnableEXT::eGpuAssisted: return "GpuAssisted";
      case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot: return "GpuAssistedReserveBindingSlot";
      case ValidationFeatureEnableEXT::eBestPractices: return "BestPractices";
      case ValidationFeatureEnableEXT::eDebugPrintf: return "DebugPrintf";
      case ValidationFeatureEnableEXT::eSynchronizationValidation: return "SynchronizationValidation";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ValidationFeatureDisableEXT
  {
    eAll                   = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
    eShaders               = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
    eThreadSafety          = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
    eApiParameters         = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
    eObjectLifetimes       = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
    eCoreChecks            = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
    eUniqueHandles         = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT,
    eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
  {
    switch ( value )
    {
      case ValidationFeatureDisableEXT::eAll: return "All";
      case ValidationFeatureDisableEXT::eShaders: return "Shaders";
      case ValidationFeatureDisableEXT::eThreadSafety: return "ThreadSafety";
      case ValidationFeatureDisableEXT::eApiParameters: return "ApiParameters";
      case ValidationFeatureDisableEXT::eObjectLifetimes: return "ObjectLifetimes";
      case ValidationFeatureDisableEXT::eCoreChecks: return "CoreChecks";
      case ValidationFeatureDisableEXT::eUniqueHandles: return "UniqueHandles";
      case ValidationFeatureDisableEXT::eShaderValidationCache: return "ShaderValidationCache";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_NV_cooperative_matrix ===

  enum class ScopeNV
  {
    eDevice      = VK_SCOPE_DEVICE_NV,
    eWorkgroup   = VK_SCOPE_WORKGROUP_NV,
    eSubgroup    = VK_SCOPE_SUBGROUP_NV,
    eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
  {
    switch ( value )
    {
      case ScopeNV::eDevice: return "Device";
      case ScopeNV::eWorkgroup: return "Workgroup";
      case ScopeNV::eSubgroup: return "Subgroup";
      case ScopeNV::eQueueFamily: return "QueueFamily";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ComponentTypeNV
  {
    eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV,
    eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV,
    eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV,
    eSint8   = VK_COMPONENT_TYPE_SINT8_NV,
    eSint16  = VK_COMPONENT_TYPE_SINT16_NV,
    eSint32  = VK_COMPONENT_TYPE_SINT32_NV,
    eSint64  = VK_COMPONENT_TYPE_SINT64_NV,
    eUint8   = VK_COMPONENT_TYPE_UINT8_NV,
    eUint16  = VK_COMPONENT_TYPE_UINT16_NV,
    eUint32  = VK_COMPONENT_TYPE_UINT32_NV,
    eUint64  = VK_COMPONENT_TYPE_UINT64_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value )
  {
    switch ( value )
    {
      case ComponentTypeNV::eFloat16: return "Float16";
      case ComponentTypeNV::eFloat32: return "Float32";
      case ComponentTypeNV::eFloat64: return "Float64";
      case ComponentTypeNV::eSint8: return "Sint8";
      case ComponentTypeNV::eSint16: return "Sint16";
      case ComponentTypeNV::eSint32: return "Sint32";
      case ComponentTypeNV::eSint64: return "Sint64";
      case ComponentTypeNV::eUint8: return "Uint8";
      case ComponentTypeNV::eUint16: return "Uint16";
      case ComponentTypeNV::eUint32: return "Uint32";
      case ComponentTypeNV::eUint64: return "Uint64";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_NV_coverage_reduction_mode ===

  enum class CoverageReductionModeNV
  {
    eMerge    = VK_COVERAGE_REDUCTION_MODE_MERGE_NV,
    eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV
  };

  VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value )
  {
    switch ( value )
    {
      case CoverageReductionModeNV::eMerge: return "Merge";
      case CoverageReductionModeNV::eTruncate: return "Truncate";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_provoking_vertex ===

  enum class ProvokingVertexModeEXT
  {
    eFirstVertex = VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT,
    eLastVertex  = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( ProvokingVertexModeEXT value )
  {
    switch ( value )
    {
      case ProvokingVertexModeEXT::eFirstVertex: return "FirstVertex";
      case ProvokingVertexModeEXT::eLastVertex: return "LastVertex";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_EXT_full_screen_exclusive ===

  enum class FullScreenExclusiveEXT
  {
    eDefault               = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
    eAllowed               = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT,
    eDisallowed            = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
    eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value )
  {
    switch ( value )
    {
      case FullScreenExclusiveEXT::eDefault: return "Default";
      case FullScreenExclusiveEXT::eAllowed: return "Allowed";
      case FullScreenExclusiveEXT::eDisallowed: return "Disallowed";
      case FullScreenExclusiveEXT::eApplicationControlled: return "ApplicationControlled";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_EXT_line_rasterization ===

  enum class LineRasterizationModeEXT
  {
    eDefault           = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
    eRectangular       = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT,
    eBresenham         = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT,
    eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value )
  {
    switch ( value )
    {
      case LineRasterizationModeEXT::eDefault: return "Default";
      case LineRasterizationModeEXT::eRectangular: return "Rectangular";
      case LineRasterizationModeEXT::eBresenham: return "Bresenham";
      case LineRasterizationModeEXT::eRectangularSmooth: return "RectangularSmooth";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_KHR_pipeline_executable_properties ===

  enum class PipelineExecutableStatisticFormatKHR
  {
    eBool32  = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
    eInt64   = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR,
    eUint64  = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR,
    eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value )
  {
    switch ( value )
    {
      case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32";
      case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64";
      case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64";
      case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_NV_device_generated_commands ===

  enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV
  {
    eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value )
  {
    switch ( value )
    {
      case IndirectStateFlagBitsNV::eFlagFrontface: return "FlagFrontface";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class IndirectCommandsTokenTypeNV
  {
    eShaderGroup  = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
    eStateFlags   = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
    eIndexBuffer  = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
    eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
    ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
    eDrawIndexed  = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
    eDraw         = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
    eDrawTasks    = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV
  };

  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )
  {
    switch ( value )
    {
      case IndirectCommandsTokenTypeNV::eShaderGroup: return "ShaderGroup";
      case IndirectCommandsTokenTypeNV::eStateFlags: return "StateFlags";
      case IndirectCommandsTokenTypeNV::eIndexBuffer: return "IndexBuffer";
      case IndirectCommandsTokenTypeNV::eVertexBuffer: return "VertexBuffer";
      case IndirectCommandsTokenTypeNV::ePushConstant: return "PushConstant";
      case IndirectCommandsTokenTypeNV::eDrawIndexed: return "DrawIndexed";
      case IndirectCommandsTokenTypeNV::eDraw: return "Draw";
      case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
  {
    eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV,
    eIndexedSequences   = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV,
    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value )
  {
    switch ( value )
    {
      case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess: return "ExplicitPreprocess";
      case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences: return "IndexedSequences";
      case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences: return "UnorderedSequences";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_device_memory_report ===

  enum class DeviceMemoryReportEventTypeEXT
  {
    eAllocate         = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT,
    eFree             = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT,
    eImport           = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT,
    eUnimport         = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT,
    eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value )
  {
    switch ( value )
    {
      case DeviceMemoryReportEventTypeEXT::eAllocate: return "Allocate";
      case DeviceMemoryReportEventTypeEXT::eFree: return "Free";
      case DeviceMemoryReportEventTypeEXT::eImport: return "Import";
      case DeviceMemoryReportEventTypeEXT::eUnimport: return "Unimport";
      case DeviceMemoryReportEventTypeEXT::eAllocationFailed: return "AllocationFailed";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_EXT_private_data ===

  enum class PrivateDataSlotCreateFlagBitsEXT : VkPrivateDataSlotCreateFlagsEXT
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBitsEXT )
  {
    return "(void)";
  }

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_encode_queue ===

  enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR
  {
    eDefault   = VK_VIDEO_ENCODE_DEFAULT_KHR,
    eReserved0 = VK_VIDEO_ENCODE_RESERVED_0_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoEncodeFlagBitsKHR::eDefault: return "Default";
      case VideoEncodeFlagBitsKHR::eReserved0: return "Reserved0";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR
  {
    eDefault = VK_VIDEO_ENCODE_RATE_CONTROL_DEFAULT_KHR,
    eReset   = VK_VIDEO_ENCODE_RATE_CONTROL_RESET_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoEncodeRateControlFlagBitsKHR::eDefault: return "Default";
      case VideoEncodeRateControlFlagBitsKHR::eReset: return "Reset";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR
  {
    eNone = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR,
    eCbr  = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR,
    eVbr  = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagBitsKHR value )
  {
    switch ( value )
    {
      case VideoEncodeRateControlModeFlagBitsKHR::eNone: return "None";
      case VideoEncodeRateControlModeFlagBitsKHR::eCbr: return "Cbr";
      case VideoEncodeRateControlModeFlagBitsKHR::eVbr: return "Vbr";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_NV_device_diagnostics_config ===

  enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV
  {
    eEnableShaderDebugInfo      = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV,
    eEnableResourceTracking     = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
    eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value )
  {
    switch ( value )
    {
      case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo";
      case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking";
      case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_KHR_synchronization2 ===

  enum class PipelineStageFlagBits2KHR : VkPipelineStageFlags2KHR
  {
    eNone                         = VK_PIPELINE_STAGE_2_NONE_KHR,
    eTopOfPipe                    = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR,
    eDrawIndirect                 = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR,
    eVertexInput                  = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR,
    eVertexShader                 = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR,
    eTessellationControlShader    = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR,
    eTessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR,
    eGeometryShader               = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR,
    eFragmentShader               = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR,
    eEarlyFragmentTests           = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR,
    eLateFragmentTests            = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR,
    eColorAttachmentOutput        = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR,
    eComputeShader                = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR,
    eAllTransfer                  = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR,
    eBottomOfPipe                 = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR,
    eHost                         = VK_PIPELINE_STAGE_2_HOST_BIT_KHR,
    eAllGraphics                  = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR,
    eAllCommands                  = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR,
    eCopy                         = VK_PIPELINE_STAGE_2_COPY_BIT_KHR,
    eResolve                      = VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR,
    eBlit                         = VK_PIPELINE_STAGE_2_BLIT_BIT_KHR,
    eClear                        = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR,
    eIndexInput                   = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR,
    eVertexAttributeInput         = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR,
    ePreRasterizationShaders      = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoDecode = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR,
    eVideoEncode = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eTransformFeedbackEXT          = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,
    eConditionalRenderingEXT       = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
    eCommandPreprocessNV           = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV,
    eFragmentShadingRateAttachment = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
    eAccelerationStructureBuild    = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
    eRayTracingShader              = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR,
    eFragmentDensityProcessEXT     = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
    eTaskShaderNV                  = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
    eMeshShaderNV                  = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
    eAccelerationStructureBuildNV  = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
    eRayTracingShaderNV            = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
    eShadingRateImageNV            = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
    eTransfer                      = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR
  };
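  // Note: eAccelerationStructureBuildNV, eRayTracingShaderNV, eShadingRateImageNV and eTransfer alias
  // bits that already appear above under their KHR names, so they cannot form additional case labels
  // in to_string below; the default branch also prints only the low 32 bits of this 64-bit flag type.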

  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits2KHR value )
  {
    switch ( value )
    {
      case PipelineStageFlagBits2KHR::eNone: return "None";
      case PipelineStageFlagBits2KHR::eTopOfPipe: return "TopOfPipe";
      case PipelineStageFlagBits2KHR::eDrawIndirect: return "DrawIndirect";
      case PipelineStageFlagBits2KHR::eVertexInput: return "VertexInput";
      case PipelineStageFlagBits2KHR::eVertexShader: return "VertexShader";
      case PipelineStageFlagBits2KHR::eTessellationControlShader: return "TessellationControlShader";
      case PipelineStageFlagBits2KHR::eTessellationEvaluationShader: return "TessellationEvaluationShader";
      case PipelineStageFlagBits2KHR::eGeometryShader: return "GeometryShader";
      case PipelineStageFlagBits2KHR::eFragmentShader: return "FragmentShader";
      case PipelineStageFlagBits2KHR::eEarlyFragmentTests: return "EarlyFragmentTests";
      case PipelineStageFlagBits2KHR::eLateFragmentTests: return "LateFragmentTests";
      case PipelineStageFlagBits2KHR::eColorAttachmentOutput: return "ColorAttachmentOutput";
      case PipelineStageFlagBits2KHR::eComputeShader: return "ComputeShader";
      case PipelineStageFlagBits2KHR::eAllTransfer: return "AllTransfer";
      case PipelineStageFlagBits2KHR::eBottomOfPipe: return "BottomOfPipe";
      case PipelineStageFlagBits2KHR::eHost: return "Host";
      case PipelineStageFlagBits2KHR::eAllGraphics: return "AllGraphics";
      case PipelineStageFlagBits2KHR::eAllCommands: return "AllCommands";
      case PipelineStageFlagBits2KHR::eCopy: return "Copy";
      case PipelineStageFlagBits2KHR::eResolve: return "Resolve";
      case PipelineStageFlagBits2KHR::eBlit: return "Blit";
      case PipelineStageFlagBits2KHR::eClear: return "Clear";
      case PipelineStageFlagBits2KHR::eIndexInput: return "IndexInput";
      case PipelineStageFlagBits2KHR::eVertexAttributeInput: return "VertexAttributeInput";
      case PipelineStageFlagBits2KHR::ePreRasterizationShaders: return "PreRasterizationShaders";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case PipelineStageFlagBits2KHR::eVideoDecode: return "VideoDecode";
      case PipelineStageFlagBits2KHR::eVideoEncode: return "VideoEncode";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case PipelineStageFlagBits2KHR::eTransformFeedbackEXT: return "TransformFeedbackEXT";
      case PipelineStageFlagBits2KHR::eConditionalRenderingEXT: return "ConditionalRenderingEXT";
      case PipelineStageFlagBits2KHR::eCommandPreprocessNV: return "CommandPreprocessNV";
      case PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment";
      case PipelineStageFlagBits2KHR::eAccelerationStructureBuild: return "AccelerationStructureBuild";
      case PipelineStageFlagBits2KHR::eRayTracingShader: return "RayTracingShader";
      case PipelineStageFlagBits2KHR::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT";
      case PipelineStageFlagBits2KHR::eTaskShaderNV: return "TaskShaderNV";
      case PipelineStageFlagBits2KHR::eMeshShaderNV: return "MeshShaderNV";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class AccessFlagBits2KHR : VkAccessFlags2KHR
  {
    eNone                        = VK_ACCESS_2_NONE_KHR,
    eIndirectCommandRead         = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR,
    eIndexRead                   = VK_ACCESS_2_INDEX_READ_BIT_KHR,
    eVertexAttributeRead         = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR,
    eUniformRead                 = VK_ACCESS_2_UNIFORM_READ_BIT_KHR,
    eInputAttachmentRead         = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR,
    eShaderRead                  = VK_ACCESS_2_SHADER_READ_BIT_KHR,
    eShaderWrite                 = VK_ACCESS_2_SHADER_WRITE_BIT_KHR,
    eColorAttachmentRead         = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR,
    eColorAttachmentWrite        = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR,
    eDepthStencilAttachmentRead  = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR,
    eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR,
    eTransferRead                = VK_ACCESS_2_TRANSFER_READ_BIT_KHR,
    eTransferWrite               = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR,
    eHostRead                    = VK_ACCESS_2_HOST_READ_BIT_KHR,
    eHostWrite                   = VK_ACCESS_2_HOST_WRITE_BIT_KHR,
    eMemoryRead                  = VK_ACCESS_2_MEMORY_READ_BIT_KHR,
    eMemoryWrite                 = VK_ACCESS_2_MEMORY_WRITE_BIT_KHR,
    eShaderSampledRead           = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR,
    eShaderStorageRead           = VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR,
    eShaderStorageWrite          = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    eVideoDecodeRead  = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR,
    eVideoDecodeWrite = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR,
    eVideoEncodeRead  = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR,
    eVideoEncodeWrite = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    eTransformFeedbackWriteEXT         = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
    eTransformFeedbackCounterReadEXT   = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
    eTransformFeedbackCounterWriteEXT  = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
    eConditionalRenderingReadEXT       = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT,
    eCommandPreprocessReadNV           = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV,
    eCommandPreprocessWriteNV          = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV,
    eFragmentShadingRateAttachmentRead = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
    eAccelerationStructureRead         = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR,
    eAccelerationStructureWrite        = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
    eFragmentDensityMapReadEXT         = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
    eAccelerationStructureReadNV       = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
    eAccelerationStructureWriteNV      = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
    eShadingRateImageReadNV            = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( AccessFlagBits2KHR value )
  {
    switch ( value )
    {
      case AccessFlagBits2KHR::eNone: return "None";
      case AccessFlagBits2KHR::eIndirectCommandRead: return "IndirectCommandRead";
      case AccessFlagBits2KHR::eIndexRead: return "IndexRead";
      case AccessFlagBits2KHR::eVertexAttributeRead: return "VertexAttributeRead";
      case AccessFlagBits2KHR::eUniformRead: return "UniformRead";
      case AccessFlagBits2KHR::eInputAttachmentRead: return "InputAttachmentRead";
      case AccessFlagBits2KHR::eShaderRead: return "ShaderRead";
      case AccessFlagBits2KHR::eShaderWrite: return "ShaderWrite";
      case AccessFlagBits2KHR::eColorAttachmentRead: return "ColorAttachmentRead";
      case AccessFlagBits2KHR::eColorAttachmentWrite: return "ColorAttachmentWrite";
      case AccessFlagBits2KHR::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
      case AccessFlagBits2KHR::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
      case AccessFlagBits2KHR::eTransferRead: return "TransferRead";
      case AccessFlagBits2KHR::eTransferWrite: return "TransferWrite";
      case AccessFlagBits2KHR::eHostRead: return "HostRead";
      case AccessFlagBits2KHR::eHostWrite: return "HostWrite";
      case AccessFlagBits2KHR::eMemoryRead: return "MemoryRead";
      case AccessFlagBits2KHR::eMemoryWrite: return "MemoryWrite";
      case AccessFlagBits2KHR::eShaderSampledRead: return "ShaderSampledRead";
      case AccessFlagBits2KHR::eShaderStorageRead: return "ShaderStorageRead";
      case AccessFlagBits2KHR::eShaderStorageWrite: return "ShaderStorageWrite";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      case AccessFlagBits2KHR::eVideoDecodeRead: return "VideoDecodeRead";
      case AccessFlagBits2KHR::eVideoDecodeWrite: return "VideoDecodeWrite";
      case AccessFlagBits2KHR::eVideoEncodeRead: return "VideoEncodeRead";
      case AccessFlagBits2KHR::eVideoEncodeWrite: return "VideoEncodeWrite";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case AccessFlagBits2KHR::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT";
      case AccessFlagBits2KHR::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT";
      case AccessFlagBits2KHR::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT";
      case AccessFlagBits2KHR::eConditionalRenderingReadEXT: return "ConditionalRenderingReadEXT";
      case AccessFlagBits2KHR::eCommandPreprocessReadNV: return "CommandPreprocessReadNV";
      case AccessFlagBits2KHR::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV";
      case AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead: return "FragmentShadingRateAttachmentRead";
      case AccessFlagBits2KHR::eAccelerationStructureRead: return "AccelerationStructureRead";
      case AccessFlagBits2KHR::eAccelerationStructureWrite: return "AccelerationStructureWrite";
      case AccessFlagBits2KHR::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT";
      case AccessFlagBits2KHR::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class SubmitFlagBitsKHR : VkSubmitFlagsKHR
  {
    eProtected = VK_SUBMIT_PROTECTED_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( SubmitFlagBitsKHR value )
  {
    switch ( value )
    {
      case SubmitFlagBitsKHR::eProtected: return "Protected";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
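  // Usage sketch (illustrative only, not generated from the registry): the synchronization2 flag bits
  // above are typically combined into the corresponding Flags<> masks of a barrier structure; assuming
  // the MemoryBarrier2KHR struct defined elsewhere in this header, that might look like:
  //
  //   VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR barrier;
  //   barrier.srcStageMask  = PipelineStageFlagBits2KHR::eColorAttachmentOutput;
  //   barrier.srcAccessMask = AccessFlagBits2KHR::eColorAttachmentWrite;
  //   barrier.dstStageMask  = PipelineStageFlagBits2KHR::eFragmentShader;
  //   barrier.dstAccessMask = AccessFlagBits2KHR::eShaderSampledRead;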

  //=== VK_NV_fragment_shading_rate_enums ===

  enum class FragmentShadingRateNV
  {
    e1InvocationPerPixel     = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV,
    e1InvocationPer1X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV,
    e1InvocationPer2X1Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV,
    e1InvocationPer2X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV,
    e1InvocationPer2X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV,
    e1InvocationPer4X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV,
    e1InvocationPer4X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV,
    e2InvocationsPerPixel    = VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV,
    e4InvocationsPerPixel    = VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV,
    e8InvocationsPerPixel    = VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV,
    e16InvocationsPerPixel   = VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV,
    eNoInvocations           = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV
  };

  VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value )
  {
    switch ( value )
    {
      case FragmentShadingRateNV::e1InvocationPerPixel: return "1InvocationPerPixel";
      case FragmentShadingRateNV::e1InvocationPer1X2Pixels: return "1InvocationPer1X2Pixels";
      case FragmentShadingRateNV::e1InvocationPer2X1Pixels: return "1InvocationPer2X1Pixels";
      case FragmentShadingRateNV::e1InvocationPer2X2Pixels: return "1InvocationPer2X2Pixels";
      case FragmentShadingRateNV::e1InvocationPer2X4Pixels: return "1InvocationPer2X4Pixels";
      case FragmentShadingRateNV::e1InvocationPer4X2Pixels: return "1InvocationPer4X2Pixels";
      case FragmentShadingRateNV::e1InvocationPer4X4Pixels: return "1InvocationPer4X4Pixels";
      case FragmentShadingRateNV::e2InvocationsPerPixel: return "2InvocationsPerPixel";
      case FragmentShadingRateNV::e4InvocationsPerPixel: return "4InvocationsPerPixel";
      case FragmentShadingRateNV::e8InvocationsPerPixel: return "8InvocationsPerPixel";
      case FragmentShadingRateNV::e16InvocationsPerPixel: return "16InvocationsPerPixel";
      case FragmentShadingRateNV::eNoInvocations: return "NoInvocations";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class FragmentShadingRateTypeNV
  {
    eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV,
    eEnums        = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV
  };

  VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateTypeNV value )
  {
    switch ( value )
    {
      case FragmentShadingRateTypeNV::eFragmentSize: return "FragmentSize";
      case FragmentShadingRateTypeNV::eEnums: return "Enums";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  //=== VK_KHR_ray_tracing_pipeline ===

  enum class RayTracingShaderGroupTypeKHR
  {
    eGeneral            = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
    eTrianglesHitGroup  = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
  };
  using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;

  VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value )
  {
    switch ( value )
    {
      case RayTracingShaderGroupTypeKHR::eGeneral: return "General";
      case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup";
      case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  enum class ShaderGroupShaderKHR
  {
    eGeneral      = VK_SHADER_GROUP_SHADER_GENERAL_KHR,
    eClosestHit   = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR,
    eAnyHit       = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR,
    eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value )
  {
    switch ( value )
    {
      case ShaderGroupShaderKHR::eGeneral: return "General";
      case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit";
      case ShaderGroupShaderKHR::eAnyHit: return "AnyHit";
      case ShaderGroupShaderKHR::eIntersection: return "Intersection";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }

  template <ObjectType value>
  struct cpp_type
  {};

  template <typename T>
  struct IndexTypeValue
  {};

  template <>
  struct IndexTypeValue<uint16_t>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16;
  };

  template <>
  struct CppType<IndexType, IndexType::eUint16>
  {
    using Type = uint16_t;
  };

  template <>
  struct IndexTypeValue<uint32_t>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32;
  };

  template <>
  struct CppType<IndexType, IndexType::eUint32>
  {
    using Type = uint32_t;
  };

  template <>
  struct IndexTypeValue<uint8_t>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT;
  };

  template <>
  struct CppType<IndexType, IndexType::eUint8EXT>
  {
    using Type = uint8_t;
  };
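  // Illustrative sketch (not generated from the registry): IndexTypeValue lets templated code pick the
  // IndexType enumerator that matches an index element type, while CppType maps the enumerator back to
  // the C++ type. A hypothetical helper using it could look like:
  //
  //   template <typename T>
  //   void bindIndexData( VULKAN_HPP_NAMESPACE::CommandBuffer cb, VULKAN_HPP_NAMESPACE::Buffer buffer )
  //   {
  //     cb.bindIndexBuffer( buffer, 0, VULKAN_HPP_NAMESPACE::IndexTypeValue<T>::value );
  //   }
  //
  //   bindIndexData<uint16_t>( cb, indexBuffer );   // binds with IndexType::eUint16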

  //================
  //=== BITMASKs ===
  //================

  //=== VK_VERSION_1_0 ===

  using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;

  template <>
  struct FlagTraits<FormatFeatureFlagBits>
  {
    enum : VkFlags
    {
      allFlags =
        VkFlags( FormatFeatureFlagBits::eSampledImage ) | VkFlags( FormatFeatureFlagBits::eStorageImage ) |
        VkFlags( FormatFeatureFlagBits::eStorageImageAtomic ) | VkFlags( FormatFeatureFlagBits::eUniformTexelBuffer ) |
        VkFlags( FormatFeatureFlagBits::eStorageTexelBuffer ) |
        VkFlags( FormatFeatureFlagBits::eStorageTexelBufferAtomic ) | VkFlags( FormatFeatureFlagBits::eVertexBuffer ) |
        VkFlags( FormatFeatureFlagBits::eColorAttachment ) | VkFlags( FormatFeatureFlagBits::eColorAttachmentBlend ) |
        VkFlags( FormatFeatureFlagBits::eDepthStencilAttachment ) | VkFlags( FormatFeatureFlagBits::eBlitSrc ) |
        VkFlags( FormatFeatureFlagBits::eBlitDst ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterLinear ) |
        VkFlags( FormatFeatureFlagBits::eTransferSrc ) | VkFlags( FormatFeatureFlagBits::eTransferDst ) |
        VkFlags( FormatFeatureFlagBits::eMidpointChromaSamples ) |
        VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) |
        VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) |
        VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) |
        VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) |
        VkFlags( FormatFeatureFlagBits::eDisjoint ) | VkFlags( FormatFeatureFlagBits::eCositedChromaSamples ) |
        VkFlags( FormatFeatureFlagBits::eSampledImageFilterMinmax ) |
        VkFlags( FormatFeatureFlagBits::eSampledImageFilterCubicIMG )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
        | VkFlags( FormatFeatureFlagBits::eVideoDecodeOutputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoDecodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
        | VkFlags( FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) |
        VkFlags( FormatFeatureFlagBits::eFragmentDensityMapEXT ) |
        VkFlags( FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
        | VkFlags( FormatFeatureFlagBits::eVideoEncodeInputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoEncodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0,
                                                                       FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FormatFeatureFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0,
                                                                       FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FormatFeatureFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0,
                                                                       FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FormatFeatureFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( FormatFeatureFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & FormatFeatureFlagBits::eSampledImage )
      result += "SampledImage | ";
    if ( value & FormatFeatureFlagBits::eStorageImage )
      result += "StorageImage | ";
    if ( value & FormatFeatureFlagBits::eStorageImageAtomic )
      result += "StorageImageAtomic | ";
    if ( value & FormatFeatureFlagBits::eUniformTexelBuffer )
      result += "UniformTexelBuffer | ";
    if ( value & FormatFeatureFlagBits::eStorageTexelBuffer )
      result += "StorageTexelBuffer | ";
    if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic )
      result += "StorageTexelBufferAtomic | ";
    if ( value & FormatFeatureFlagBits::eVertexBuffer )
      result += "VertexBuffer | ";
    if ( value & FormatFeatureFlagBits::eColorAttachment )
      result += "ColorAttachment | ";
    if ( value & FormatFeatureFlagBits::eColorAttachmentBlend )
      result += "ColorAttachmentBlend | ";
    if ( value & FormatFeatureFlagBits::eDepthStencilAttachment )
      result += "DepthStencilAttachment | ";
    if ( value & FormatFeatureFlagBits::eBlitSrc )
      result += "BlitSrc | ";
    if ( value & FormatFeatureFlagBits::eBlitDst )
      result += "BlitDst | ";
    if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear )
      result += "SampledImageFilterLinear | ";
    if ( value & FormatFeatureFlagBits::eTransferSrc )
      result += "TransferSrc | ";
    if ( value & FormatFeatureFlagBits::eTransferDst )
      result += "TransferDst | ";
    if ( value & FormatFeatureFlagBits::eMidpointChromaSamples )
      result += "MidpointChromaSamples | ";
    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter )
      result += "SampledImageYcbcrConversionLinearFilter | ";
    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter )
      result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit )
      result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
    if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable )
      result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
    if ( value & FormatFeatureFlagBits::eDisjoint )
      result += "Disjoint | ";
    if ( value & FormatFeatureFlagBits::eCositedChromaSamples )
      result += "CositedChromaSamples | ";
    if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax )
      result += "SampledImageFilterMinmax | ";
    if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG )
      result += "SampledImageFilterCubicIMG | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR )
      result += "VideoDecodeOutputKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR )
      result += "VideoDecodeDpbKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR )
      result += "AccelerationStructureVertexBufferKHR | ";
    if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT )
      result += "FragmentDensityMapEXT | ";
    if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR )
      result += "FragmentShadingRateAttachmentKHR | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR )
      result += "VideoEncodeInputKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR )
      result += "VideoEncodeDpbKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
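
  // Usage sketch: FormatFeatureFlags normally comes back from a format-properties query and is
  // tested with the operators defined above.  The name `physicalDevice` (a valid vk::PhysicalDevice)
  // and the default `vk` namespace alias are assumptions made for illustration only:
  //
  //   vk::FormatProperties props = physicalDevice.getFormatProperties( vk::Format::eR8G8B8A8Unorm );
  //   if ( props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eSampledImageFilterLinear )
  //   {
  //     // the format supports linear filtering of sampled images with optimal tiling
  //   }
  //   // to_string( props.optimalTilingFeatures ) renders the set bits, e.g. "{ SampledImage | BlitSrc | ... }"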

  using ImageCreateFlags = Flags<ImageCreateFlagBits>;

  template <>
  struct FlagTraits<ImageCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ImageCreateFlagBits::eSparseBinding ) | VkFlags( ImageCreateFlagBits::eSparseResidency ) |
                 VkFlags( ImageCreateFlagBits::eSparseAliased ) | VkFlags( ImageCreateFlagBits::eMutableFormat ) |
                 VkFlags( ImageCreateFlagBits::eCubeCompatible ) | VkFlags( ImageCreateFlagBits::eAlias ) |
                 VkFlags( ImageCreateFlagBits::eSplitInstanceBindRegions ) |
                 VkFlags( ImageCreateFlagBits::e2DArrayCompatible ) |
                 VkFlags( ImageCreateFlagBits::eBlockTexelViewCompatible ) |
                 VkFlags( ImageCreateFlagBits::eExtendedUsage ) | VkFlags( ImageCreateFlagBits::eProtected ) |
                 VkFlags( ImageCreateFlagBits::eDisjoint ) | VkFlags( ImageCreateFlagBits::eCornerSampledNV ) |
                 VkFlags( ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) |
                 VkFlags( ImageCreateFlagBits::eSubsampledEXT )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0,
                                                                     ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ImageCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&(ImageCreateFlagBits bit0,
                                                                    ImageCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return ImageCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0,
                                                                     ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ImageCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( ImageCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ImageCreateFlagBits::eSparseBinding )
      result += "SparseBinding | ";
    if ( value & ImageCreateFlagBits::eSparseResidency )
      result += "SparseResidency | ";
    if ( value & ImageCreateFlagBits::eSparseAliased )
      result += "SparseAliased | ";
    if ( value & ImageCreateFlagBits::eMutableFormat )
      result += "MutableFormat | ";
    if ( value & ImageCreateFlagBits::eCubeCompatible )
      result += "CubeCompatible | ";
    if ( value & ImageCreateFlagBits::eAlias )
      result += "Alias | ";
    if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions )
      result += "SplitInstanceBindRegions | ";
    if ( value & ImageCreateFlagBits::e2DArrayCompatible )
      result += "2DArrayCompatible | ";
    if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible )
      result += "BlockTexelViewCompatible | ";
    if ( value & ImageCreateFlagBits::eExtendedUsage )
      result += "ExtendedUsage | ";
    if ( value & ImageCreateFlagBits::eProtected )
      result += "Protected | ";
    if ( value & ImageCreateFlagBits::eDisjoint )
      result += "Disjoint | ";
    if ( value & ImageCreateFlagBits::eCornerSampledNV )
      result += "CornerSampledNV | ";
    if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT )
      result += "SampleLocationsCompatibleDepthEXT | ";
    if ( value & ImageCreateFlagBits::eSubsampledEXT )
      result += "SubsampledEXT | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using ImageUsageFlags = Flags<ImageUsageFlagBits>;

  template <>
  struct FlagTraits<ImageUsageFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ImageUsageFlagBits::eTransferSrc ) | VkFlags( ImageUsageFlagBits::eTransferDst ) |
                 VkFlags( ImageUsageFlagBits::eSampled ) | VkFlags( ImageUsageFlagBits::eStorage ) |
                 VkFlags( ImageUsageFlagBits::eColorAttachment ) |
                 VkFlags( ImageUsageFlagBits::eDepthStencilAttachment ) |
                 VkFlags( ImageUsageFlagBits::eTransientAttachment ) | VkFlags( ImageUsageFlagBits::eInputAttachment )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
                 | VkFlags( ImageUsageFlagBits::eVideoDecodeDstKHR ) |
                 VkFlags( ImageUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( ImageUsageFlagBits::eVideoDecodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
                 | VkFlags( ImageUsageFlagBits::eFragmentDensityMapEXT ) |
                 VkFlags( ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
                 | VkFlags( ImageUsageFlagBits::eVideoEncodeDstKHR ) |
                 VkFlags( ImageUsageFlagBits::eVideoEncodeSrcKHR ) | VkFlags( ImageUsageFlagBits::eVideoEncodeDpbKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0,
                                                                    ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ImageUsageFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&(ImageUsageFlagBits bit0,
                                                                   ImageUsageFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return ImageUsageFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0,
                                                                    ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ImageUsageFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( ImageUsageFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ImageUsageFlagBits::eTransferSrc )
      result += "TransferSrc | ";
    if ( value & ImageUsageFlagBits::eTransferDst )
      result += "TransferDst | ";
    if ( value & ImageUsageFlagBits::eSampled )
      result += "Sampled | ";
    if ( value & ImageUsageFlagBits::eStorage )
      result += "Storage | ";
    if ( value & ImageUsageFlagBits::eColorAttachment )
      result += "ColorAttachment | ";
    if ( value & ImageUsageFlagBits::eDepthStencilAttachment )
      result += "DepthStencilAttachment | ";
    if ( value & ImageUsageFlagBits::eTransientAttachment )
      result += "TransientAttachment | ";
    if ( value & ImageUsageFlagBits::eInputAttachment )
      result += "InputAttachment | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR )
      result += "VideoDecodeDstKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR )
      result += "VideoDecodeSrcKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR )
      result += "VideoDecodeDpbKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT )
      result += "FragmentDensityMapEXT | ";
    if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR )
      result += "FragmentShadingRateAttachmentKHR | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR )
      result += "VideoEncodeDstKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR )
      result += "VideoEncodeSrcKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR )
      result += "VideoEncodeDpbKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
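
  // Usage sketch: ImageCreateFlags and ImageUsageFlags are usually combined from single bits when
  // filling a vk::ImageCreateInfo.  A minimal cube-map setup; all names are illustrative and the
  // default `vk` namespace alias is assumed:
  //
  //   vk::ImageCreateInfo imageInfo;
  //   imageInfo.flags       = vk::ImageCreateFlagBits::eCubeCompatible;
  //   imageInfo.imageType   = vk::ImageType::e2D;
  //   imageInfo.format      = vk::Format::eR8G8B8A8Unorm;
  //   imageInfo.extent      = vk::Extent3D( 256, 256, 1 );
  //   imageInfo.mipLevels   = 1;
  //   imageInfo.arrayLayers = 6;  // six faces for a cube map
  //   imageInfo.usage       = vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst;
  //   // remaining members keep their defaults (e1 samples, optimal tiling, exclusive sharing,
  //   // undefined initial layout)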

  using InstanceCreateFlags = Flags<InstanceCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags )
  {
    return "{}";
  }

  using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;

  template <>
  struct FlagTraits<MemoryHeapFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( MemoryHeapFlagBits::eDeviceLocal ) | VkFlags( MemoryHeapFlagBits::eMultiInstance )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0,
                                                                    MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return MemoryHeapFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&(MemoryHeapFlagBits bit0,
                                                                   MemoryHeapFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return MemoryHeapFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0,
                                                                    MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return MemoryHeapFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( MemoryHeapFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & MemoryHeapFlagBits::eDeviceLocal )
      result += "DeviceLocal | ";
    if ( value & MemoryHeapFlagBits::eMultiInstance )
      result += "MultiInstance | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;

  template <>
  struct FlagTraits<MemoryPropertyFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( MemoryPropertyFlagBits::eDeviceLocal ) | VkFlags( MemoryPropertyFlagBits::eHostVisible ) |
                 VkFlags( MemoryPropertyFlagBits::eHostCoherent ) | VkFlags( MemoryPropertyFlagBits::eHostCached ) |
                 VkFlags( MemoryPropertyFlagBits::eLazilyAllocated ) | VkFlags( MemoryPropertyFlagBits::eProtected ) |
                 VkFlags( MemoryPropertyFlagBits::eDeviceCoherentAMD ) |
                 VkFlags( MemoryPropertyFlagBits::eDeviceUncachedAMD )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags
                                         operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return MemoryPropertyFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&(MemoryPropertyFlagBits bit0,
                                                                       MemoryPropertyFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return MemoryPropertyFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags
                                         operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return MemoryPropertyFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( MemoryPropertyFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & MemoryPropertyFlagBits::eDeviceLocal )
      result += "DeviceLocal | ";
    if ( value & MemoryPropertyFlagBits::eHostVisible )
      result += "HostVisible | ";
    if ( value & MemoryPropertyFlagBits::eHostCoherent )
      result += "HostCoherent | ";
    if ( value & MemoryPropertyFlagBits::eHostCached )
      result += "HostCached | ";
    if ( value & MemoryPropertyFlagBits::eLazilyAllocated )
      result += "LazilyAllocated | ";
    if ( value & MemoryPropertyFlagBits::eProtected )
      result += "Protected | ";
    if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD )
      result += "DeviceCoherentAMD | ";
    if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD )
      result += "DeviceUncachedAMD | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
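
  // Usage sketch: MemoryPropertyFlags is typically matched against the per-memory-type flags
  // reported by the physical device; a type qualifies when it contains every requested bit.
  // `physicalDevice` is an assumed, valid vk::PhysicalDevice; a real allocation would additionally
  // filter by the memoryTypeBits of a vk::MemoryRequirements:
  //
  //   vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
  //   vk::MemoryPropertyFlags required =
  //     vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent;
  //   uint32_t memoryTypeIndex = memProps.memoryTypeCount;  // sentinel: not found
  //   for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
  //   {
  //     if ( ( memProps.memoryTypes[i].propertyFlags & required ) == required )
  //     {
  //       memoryTypeIndex = i;
  //       break;
  //     }
  //   }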

  using QueueFlags = Flags<QueueFlagBits>;

  template <>
  struct FlagTraits<QueueFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( QueueFlagBits::eGraphics ) | VkFlags( QueueFlagBits::eCompute ) |
                 VkFlags( QueueFlagBits::eTransfer ) | VkFlags( QueueFlagBits::eSparseBinding ) |
                 VkFlags( QueueFlagBits::eProtected )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
                 | VkFlags( QueueFlagBits::eVideoDecodeKHR ) | VkFlags( QueueFlagBits::eVideoEncodeKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0,
                                                               QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return QueueFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&(QueueFlagBits bit0, QueueFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return QueueFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0,
                                                               QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return QueueFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( QueueFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( QueueFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & QueueFlagBits::eGraphics )
      result += "Graphics | ";
    if ( value & QueueFlagBits::eCompute )
      result += "Compute | ";
    if ( value & QueueFlagBits::eTransfer )
      result += "Transfer | ";
    if ( value & QueueFlagBits::eSparseBinding )
      result += "SparseBinding | ";
    if ( value & QueueFlagBits::eProtected )
      result += "Protected | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & QueueFlagBits::eVideoDecodeKHR )
      result += "VideoDecodeKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & QueueFlagBits::eVideoEncodeKHR )
      result += "VideoEncodeKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
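
  // Usage sketch: QueueFlags is commonly inspected when picking a queue family index.
  // `physicalDevice` is an assumed, valid vk::PhysicalDevice:
  //
  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  //   uint32_t graphicsFamily = static_cast<uint32_t>( families.size() );  // sentinel: not found
  //   for ( uint32_t i = 0; i < families.size(); ++i )
  //   {
  //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       graphicsFamily = i;
  //       break;
  //     }
  //   }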

  using SampleCountFlags = Flags<SampleCountFlagBits>;

  template <>
  struct FlagTraits<SampleCountFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SampleCountFlagBits::e1 ) | VkFlags( SampleCountFlagBits::e2 ) |
                 VkFlags( SampleCountFlagBits::e4 ) | VkFlags( SampleCountFlagBits::e8 ) |
                 VkFlags( SampleCountFlagBits::e16 ) | VkFlags( SampleCountFlagBits::e32 ) |
                 VkFlags( SampleCountFlagBits::e64 )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0,
                                                                     SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SampleCountFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&(SampleCountFlagBits bit0,
                                                                    SampleCountFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return SampleCountFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0,
                                                                     SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SampleCountFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( SampleCountFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SampleCountFlagBits::e1 )
      result += "1 | ";
    if ( value & SampleCountFlagBits::e2 )
      result += "2 | ";
    if ( value & SampleCountFlagBits::e4 )
      result += "4 | ";
    if ( value & SampleCountFlagBits::e8 )
      result += "8 | ";
    if ( value & SampleCountFlagBits::e16 )
      result += "16 | ";
    if ( value & SampleCountFlagBits::e32 )
      result += "32 | ";
    if ( value & SampleCountFlagBits::e64 )
      result += "64 | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
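
  // Usage sketch: SampleCountFlags from the device limits can be intersected to find a sample
  // count supported for both color and depth attachments.  `physicalDevice` is an assumed,
  // valid vk::PhysicalDevice:
  //
  //   vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
  //   vk::SampleCountFlags counts =
  //     props.limits.framebufferColorSampleCounts & props.limits.framebufferDepthSampleCounts;
  //   vk::SampleCountFlagBits samples = vk::SampleCountFlagBits::e1;
  //   if ( counts & vk::SampleCountFlagBits::e8 )
  //     samples = vk::SampleCountFlagBits::e8;
  //   else if ( counts & vk::SampleCountFlagBits::e4 )
  //     samples = vk::SampleCountFlagBits::e4;
  //   else if ( counts & vk::SampleCountFlagBits::e2 )
  //     samples = vk::SampleCountFlagBits::e2;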

  using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
  {
    return "{}";
  }

  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;

  template <>
  struct FlagTraits<DeviceQueueCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DeviceQueueCreateFlagBits::eProtected )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags
                                         operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DeviceQueueCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags
                                         operator&(DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return DeviceQueueCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags
                                         operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DeviceQueueCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( DeviceQueueCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DeviceQueueCreateFlagBits::eProtected )
      result += "Protected | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using PipelineStageFlags = Flags<PipelineStageFlagBits>;

  template <>
  struct FlagTraits<PipelineStageFlagBits>
  {
    enum : VkFlags
    {
      allFlags =
        VkFlags( PipelineStageFlagBits::eTopOfPipe ) | VkFlags( PipelineStageFlagBits::eDrawIndirect ) |
        VkFlags( PipelineStageFlagBits::eVertexInput ) | VkFlags( PipelineStageFlagBits::eVertexShader ) |
        VkFlags( PipelineStageFlagBits::eTessellationControlShader ) |
        VkFlags( PipelineStageFlagBits::eTessellationEvaluationShader ) |
        VkFlags( PipelineStageFlagBits::eGeometryShader ) | VkFlags( PipelineStageFlagBits::eFragmentShader ) |
        VkFlags( PipelineStageFlagBits::eEarlyFragmentTests ) | VkFlags( PipelineStageFlagBits::eLateFragmentTests ) |
        VkFlags( PipelineStageFlagBits::eColorAttachmentOutput ) | VkFlags( PipelineStageFlagBits::eComputeShader ) |
        VkFlags( PipelineStageFlagBits::eTransfer ) | VkFlags( PipelineStageFlagBits::eBottomOfPipe ) |
        VkFlags( PipelineStageFlagBits::eHost ) | VkFlags( PipelineStageFlagBits::eAllGraphics ) |
        VkFlags( PipelineStageFlagBits::eAllCommands ) | VkFlags( PipelineStageFlagBits::eTransformFeedbackEXT ) |
        VkFlags( PipelineStageFlagBits::eConditionalRenderingEXT ) |
        VkFlags( PipelineStageFlagBits::eAccelerationStructureBuildKHR ) |
        VkFlags( PipelineStageFlagBits::eRayTracingShaderKHR ) | VkFlags( PipelineStageFlagBits::eTaskShaderNV ) |
        VkFlags( PipelineStageFlagBits::eMeshShaderNV ) | VkFlags( PipelineStageFlagBits::eFragmentDensityProcessEXT ) |
        VkFlags( PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) |
        VkFlags( PipelineStageFlagBits::eCommandPreprocessNV ) | VkFlags( PipelineStageFlagBits::eNoneKHR )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0,
                                                                       PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineStageFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&(PipelineStageFlagBits bit0,
                                                                      PipelineStageFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return PipelineStageFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0,
                                                                       PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineStageFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( PipelineStageFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PipelineStageFlagBits::eTopOfPipe )
      result += "TopOfPipe | ";
    if ( value & PipelineStageFlagBits::eDrawIndirect )
      result += "DrawIndirect | ";
    if ( value & PipelineStageFlagBits::eVertexInput )
      result += "VertexInput | ";
    if ( value & PipelineStageFlagBits::eVertexShader )
      result += "VertexShader | ";
    if ( value & PipelineStageFlagBits::eTessellationControlShader )
      result += "TessellationControlShader | ";
    if ( value & PipelineStageFlagBits::eTessellationEvaluationShader )
      result += "TessellationEvaluationShader | ";
    if ( value & PipelineStageFlagBits::eGeometryShader )
      result += "GeometryShader | ";
    if ( value & PipelineStageFlagBits::eFragmentShader )
      result += "FragmentShader | ";
    if ( value & PipelineStageFlagBits::eEarlyFragmentTests )
      result += "EarlyFragmentTests | ";
    if ( value & PipelineStageFlagBits::eLateFragmentTests )
      result += "LateFragmentTests | ";
    if ( value & PipelineStageFlagBits::eColorAttachmentOutput )
      result += "ColorAttachmentOutput | ";
    if ( value & PipelineStageFlagBits::eComputeShader )
      result += "ComputeShader | ";
    if ( value & PipelineStageFlagBits::eTransfer )
      result += "Transfer | ";
    if ( value & PipelineStageFlagBits::eBottomOfPipe )
      result += "BottomOfPipe | ";
    if ( value & PipelineStageFlagBits::eHost )
      result += "Host | ";
    if ( value & PipelineStageFlagBits::eAllGraphics )
      result += "AllGraphics | ";
    if ( value & PipelineStageFlagBits::eAllCommands )
      result += "AllCommands | ";
    if ( value & PipelineStageFlagBits::eTransformFeedbackEXT )
      result += "TransformFeedbackEXT | ";
    if ( value & PipelineStageFlagBits::eConditionalRenderingEXT )
      result += "ConditionalRenderingEXT | ";
    if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR )
      result += "AccelerationStructureBuildKHR | ";
    if ( value & PipelineStageFlagBits::eRayTracingShaderKHR )
      result += "RayTracingShaderKHR | ";
    if ( value & PipelineStageFlagBits::eTaskShaderNV )
      result += "TaskShaderNV | ";
    if ( value & PipelineStageFlagBits::eMeshShaderNV )
      result += "MeshShaderNV | ";
    if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT )
      result += "FragmentDensityProcessEXT | ";
    if ( value & PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR )
      result += "FragmentShadingRateAttachmentKHR | ";
    if ( value & PipelineStageFlagBits::eCommandPreprocessNV )
      result += "CommandPreprocessNV | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
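
  // Usage sketch: the PipelineStageFlags parameters of vk::CommandBuffer::pipelineBarrier accept
  // either a single bit or a combination built with the operators above.  `commandBuffer` is an
  // assumed vk::CommandBuffer in the recording state:
  //
  //   vk::MemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead );
  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,        // srcStageMask
  //                                  vk::PipelineStageFlagBits::eFragmentShader,  // dstStageMask
  //                                  vk::DependencyFlags(),
  //                                  barrier,    // memory barriers
  //                                  nullptr,    // buffer memory barriers
  //                                  nullptr );  // image memory barriers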

  enum class MemoryMapFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
  {
    return "(void)";
  }

  using MemoryMapFlags = Flags<MemoryMapFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags )
  {
    return "{}";
  }

  using ImageAspectFlags = Flags<ImageAspectFlagBits>;

  template <>
  struct FlagTraits<ImageAspectFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ImageAspectFlagBits::eColor ) | VkFlags( ImageAspectFlagBits::eDepth ) |
                 VkFlags( ImageAspectFlagBits::eStencil ) | VkFlags( ImageAspectFlagBits::eMetadata ) |
                 VkFlags( ImageAspectFlagBits::ePlane0 ) | VkFlags( ImageAspectFlagBits::ePlane1 ) |
                 VkFlags( ImageAspectFlagBits::ePlane2 ) | VkFlags( ImageAspectFlagBits::eMemoryPlane0EXT ) |
                 VkFlags( ImageAspectFlagBits::eMemoryPlane1EXT ) | VkFlags( ImageAspectFlagBits::eMemoryPlane2EXT ) |
                 VkFlags( ImageAspectFlagBits::eMemoryPlane3EXT )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0,
                                                                     ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ImageAspectFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&(ImageAspectFlagBits bit0,
                                                                    ImageAspectFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return ImageAspectFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0,
                                                                     ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ImageAspectFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( ImageAspectFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ImageAspectFlagBits::eColor )
      result += "Color | ";
    if ( value & ImageAspectFlagBits::eDepth )
      result += "Depth | ";
    if ( value & ImageAspectFlagBits::eStencil )
      result += "Stencil | ";
    if ( value & ImageAspectFlagBits::eMetadata )
      result += "Metadata | ";
    if ( value & ImageAspectFlagBits::ePlane0 )
      result += "Plane0 | ";
    if ( value & ImageAspectFlagBits::ePlane1 )
      result += "Plane1 | ";
    if ( value & ImageAspectFlagBits::ePlane2 )
      result += "Plane2 | ";
    if ( value & ImageAspectFlagBits::eMemoryPlane0EXT )
      result += "MemoryPlane0EXT | ";
    if ( value & ImageAspectFlagBits::eMemoryPlane1EXT )
      result += "MemoryPlane1EXT | ";
    if ( value & ImageAspectFlagBits::eMemoryPlane2EXT )
      result += "MemoryPlane2EXT | ";
    if ( value & ImageAspectFlagBits::eMemoryPlane3EXT )
      result += "MemoryPlane3EXT | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
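
  // Usage sketch: ImageAspectFlags selects the image aspects addressed by views, barriers and
  // copies; for a combined depth/stencil format both aspect bits are combined with operator|.
  // The variable names below are illustrative:
  //
  //   vk::ImageSubresourceRange colorRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
  //   vk::ImageSubresourceRange depthStencilRange(
  //     vk::ImageAspectFlagBits::eDepth | vk::ImageAspectFlagBits::eStencil, 0, 1, 0, 1 );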

  using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;

  template <>
  struct FlagTraits<SparseImageFormatFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SparseImageFormatFlagBits::eSingleMiptail ) |
                 VkFlags( SparseImageFormatFlagBits::eAlignedMipSize ) |
                 VkFlags( SparseImageFormatFlagBits::eNonstandardBlockSize )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags
                                         operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SparseImageFormatFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags
                                         operator&(SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return SparseImageFormatFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags
                                         operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SparseImageFormatFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( SparseImageFormatFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SparseImageFormatFlagBits::eSingleMiptail )
      result += "SingleMiptail | ";
    if ( value & SparseImageFormatFlagBits::eAlignedMipSize )
      result += "AlignedMipSize | ";
    if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize )
      result += "NonstandardBlockSize | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;

  template <>
  struct FlagTraits<SparseMemoryBindFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SparseMemoryBindFlagBits::eMetadata )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags
                                         operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SparseMemoryBindFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags
                                         operator&(SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return SparseMemoryBindFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags
                                         operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SparseMemoryBindFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( SparseMemoryBindFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SparseMemoryBindFlagBits::eMetadata )
      result += "Metadata | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using FenceCreateFlags = Flags<FenceCreateFlagBits>;

  template <>
  struct FlagTraits<FenceCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( FenceCreateFlagBits::eSignaled )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0,
                                                                     FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FenceCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&(FenceCreateFlagBits bit0,
                                                                    FenceCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return FenceCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0,
                                                                     FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FenceCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( FenceCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & FenceCreateFlagBits::eSignaled )
      result += "Signaled | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
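
  // Usage sketch: FenceCreateFlagBits::eSignaled is typically the only bit set here, e.g. to start
  // a per-frame fence in the signaled state.  `device` is an assumed, valid vk::Device, and the
  // default exception-enabled configuration is assumed:
  //
  //   vk::Fence frameFence = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );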

  enum class SemaphoreCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
  {
    return "(void)";
  }

  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags )
  {
    return "{}";
  }

  using EventCreateFlags = Flags<EventCreateFlagBits>;

  template <>
  struct FlagTraits<EventCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( EventCreateFlagBits::eDeviceOnlyKHR )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator|( EventCreateFlagBits bit0,
                                                                     EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return EventCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator&(EventCreateFlagBits bit0,
                                                                    EventCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return EventCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator^( EventCreateFlagBits bit0,
                                                                     EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return EventCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator~( EventCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( EventCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & EventCreateFlagBits::eDeviceOnlyKHR )
      result += "DeviceOnlyKHR | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;

  template <>
  struct FlagTraits<QueryPipelineStatisticFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eClippingInvocations ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eClippingPrimitives ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) |
                 VkFlags( QueryPipelineStatisticFlagBits::eComputeShaderInvocations )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
                                         operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return QueryPipelineStatisticFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
                                         operator&(QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return QueryPipelineStatisticFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags
                                         operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return QueryPipelineStatisticFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( QueryPipelineStatisticFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices )
      result += "InputAssemblyVertices | ";
    if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives )
      result += "InputAssemblyPrimitives | ";
    if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations )
      result += "VertexShaderInvocations | ";
    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations )
      result += "GeometryShaderInvocations | ";
    if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives )
      result += "GeometryShaderPrimitives | ";
    if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations )
      result += "ClippingInvocations | ";
    if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives )
      result += "ClippingPrimitives | ";
    if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations )
      result += "FragmentShaderInvocations | ";
    if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches )
      result += "TessellationControlShaderPatches | ";
    if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations )
      result += "TessellationEvaluationShaderInvocations | ";
    if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations )
      result += "ComputeShaderInvocations | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
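
  // Usage sketch: QueryPipelineStatisticFlags selects the counters collected by a
  // pipeline-statistics query pool (the pipelineStatisticsQuery device feature must be enabled).
  // `device` is an assumed, valid vk::Device:
  //
  //   vk::QueryPoolCreateInfo queryPoolInfo;
  //   queryPoolInfo.queryType          = vk::QueryType::ePipelineStatistics;
  //   queryPoolInfo.queryCount         = 1;
  //   queryPoolInfo.pipelineStatistics = vk::QueryPipelineStatisticFlagBits::eVertexShaderInvocations |
  //                                      vk::QueryPipelineStatisticFlagBits::eFragmentShaderInvocations;
  //   vk::QueryPool queryPool = device.createQueryPool( queryPoolInfo );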

  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags )
  {
    return "{}";
  }

  using QueryResultFlags = Flags<QueryResultFlagBits>;

  template <>
  struct FlagTraits<QueryResultFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( QueryResultFlagBits::e64 ) | VkFlags( QueryResultFlagBits::eWait ) |
                 VkFlags( QueryResultFlagBits::eWithAvailability ) | VkFlags( QueryResultFlagBits::ePartial )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
                 | VkFlags( QueryResultFlagBits::eWithStatusKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0,
                                                                     QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return QueryResultFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&(QueryResultFlagBits bit0,
                                                                    QueryResultFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return QueryResultFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0,
                                                                     QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return QueryResultFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( QueryResultFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & QueryResultFlagBits::e64 )
      result += "64 | ";
    if ( value & QueryResultFlagBits::eWait )
      result += "Wait | ";
    if ( value & QueryResultFlagBits::eWithAvailability )
      result += "WithAvailability | ";
    if ( value & QueryResultFlagBits::ePartial )
      result += "Partial | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & QueryResultFlagBits::eWithStatusKHR )
      result += "WithStatusKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using BufferCreateFlags = Flags<BufferCreateFlagBits>;

  template <>
  struct FlagTraits<BufferCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( BufferCreateFlagBits::eSparseBinding ) | VkFlags( BufferCreateFlagBits::eSparseResidency ) |
                 VkFlags( BufferCreateFlagBits::eSparseAliased ) | VkFlags( BufferCreateFlagBits::eProtected ) |
                 VkFlags( BufferCreateFlagBits::eDeviceAddressCaptureReplay )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0,
                                                                      BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return BufferCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&(BufferCreateFlagBits bit0,
                                                                     BufferCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return BufferCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0,
                                                                      BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return BufferCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator~( BufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( BufferCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & BufferCreateFlagBits::eSparseBinding )
      result += "SparseBinding | ";
    if ( value & BufferCreateFlagBits::eSparseResidency )
      result += "SparseResidency | ";
    if ( value & BufferCreateFlagBits::eSparseAliased )
      result += "SparseAliased | ";
    if ( value & BufferCreateFlagBits::eProtected )
      result += "Protected | ";
    if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay )
      result += "DeviceAddressCaptureReplay | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using BufferUsageFlags = Flags<BufferUsageFlagBits>;

  template <>
  struct FlagTraits<BufferUsageFlagBits>
  {
    enum : VkFlags
    {
      allFlags =
        VkFlags( BufferUsageFlagBits::eTransferSrc ) | VkFlags( BufferUsageFlagBits::eTransferDst ) |
        VkFlags( BufferUsageFlagBits::eUniformTexelBuffer ) | VkFlags( BufferUsageFlagBits::eStorageTexelBuffer ) |
        VkFlags( BufferUsageFlagBits::eUniformBuffer ) | VkFlags( BufferUsageFlagBits::eStorageBuffer ) |
        VkFlags( BufferUsageFlagBits::eIndexBuffer ) | VkFlags( BufferUsageFlagBits::eVertexBuffer ) |
        VkFlags( BufferUsageFlagBits::eIndirectBuffer ) | VkFlags( BufferUsageFlagBits::eShaderDeviceAddress )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
        | VkFlags( BufferUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( BufferUsageFlagBits::eVideoDecodeDstKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
        | VkFlags( BufferUsageFlagBits::eTransformFeedbackBufferEXT ) |
        VkFlags( BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) |
        VkFlags( BufferUsageFlagBits::eConditionalRenderingEXT ) |
        VkFlags( BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) |
        VkFlags( BufferUsageFlagBits::eAccelerationStructureStorageKHR ) |
        VkFlags( BufferUsageFlagBits::eShaderBindingTableKHR )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
        | VkFlags( BufferUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( BufferUsageFlagBits::eVideoEncodeSrcKHR )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0,
                                                                     BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return BufferUsageFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&(BufferUsageFlagBits bit0,
                                                                    BufferUsageFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return BufferUsageFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0,
                                                                     BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return BufferUsageFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator~( BufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( BufferUsageFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & BufferUsageFlagBits::eTransferSrc )
      result += "TransferSrc | ";
    if ( value & BufferUsageFlagBits::eTransferDst )
      result += "TransferDst | ";
    if ( value & BufferUsageFlagBits::eUniformTexelBuffer )
      result += "UniformTexelBuffer | ";
    if ( value & BufferUsageFlagBits::eStorageTexelBuffer )
      result += "StorageTexelBuffer | ";
    if ( value & BufferUsageFlagBits::eUniformBuffer )
      result += "UniformBuffer | ";
    if ( value & BufferUsageFlagBits::eStorageBuffer )
      result += "StorageBuffer | ";
    if ( value & BufferUsageFlagBits::eIndexBuffer )
      result += "IndexBuffer | ";
    if ( value & BufferUsageFlagBits::eVertexBuffer )
      result += "VertexBuffer | ";
    if ( value & BufferUsageFlagBits::eIndirectBuffer )
      result += "IndirectBuffer | ";
    if ( value & BufferUsageFlagBits::eShaderDeviceAddress )
      result += "ShaderDeviceAddress | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR )
      result += "VideoDecodeSrcKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR )
      result += "VideoDecodeDstKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT )
      result += "TransformFeedbackBufferEXT | ";
    if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT )
      result += "TransformFeedbackCounterBufferEXT | ";
    if ( value & BufferUsageFlagBits::eConditionalRenderingEXT )
      result += "ConditionalRenderingEXT | ";
    if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR )
      result += "AccelerationStructureBuildInputReadOnlyKHR | ";
    if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR )
      result += "AccelerationStructureStorageKHR | ";
    if ( value & BufferUsageFlagBits::eShaderBindingTableKHR )
      result += "ShaderBindingTableKHR | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR )
      result += "VideoEncodeDstKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR )
      result += "VideoEncodeSrcKHR | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
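
  // Usage sketch: BufferUsageFlags in a vk::BufferCreateInfo is almost always a combination of
  // bits built with operator|.  `device` is an assumed, valid vk::Device:
  //
  //   vk::BufferCreateInfo bufferInfo;
  //   bufferInfo.size  = 65536;  // bytes
  //   bufferInfo.usage = vk::BufferUsageFlagBits::eVertexBuffer | vk::BufferUsageFlagBits::eTransferDst;
  //   vk::Buffer vertexBuffer = device.createBuffer( bufferInfo );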

  enum class BufferViewCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
  {
    return "(void)";
  }

  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags )
  {
    return "{}";
  }

  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;

  template <>
  struct FlagTraits<ImageViewCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) |
                 VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags
                                         operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ImageViewCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&(ImageViewCreateFlagBits bit0,
                                                                        ImageViewCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return ImageViewCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags
                                         operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ImageViewCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( ImageViewCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT )
      result += "FragmentDensityMapDynamicEXT | ";
    if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT )
      result += "FragmentDensityMapDeferredEXT | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags )
  {
    return "{}";
  }

  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;

  template <>
  struct FlagTraits<PipelineCacheCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( PipelineCacheCreateFlagBits::eExternallySynchronizedEXT )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags
                                         operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineCacheCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags
                                         operator&(PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return PipelineCacheCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags
                                         operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineCacheCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( PipelineCacheCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT )
      result += "ExternallySynchronizedEXT | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using ColorComponentFlags = Flags<ColorComponentFlagBits>;

  template <>
  struct FlagTraits<ColorComponentFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ColorComponentFlagBits::eR ) | VkFlags( ColorComponentFlagBits::eG ) |
                 VkFlags( ColorComponentFlagBits::eB ) | VkFlags( ColorComponentFlagBits::eA )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags
                                         operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ColorComponentFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&(ColorComponentFlagBits bit0,
                                                                       ColorComponentFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return ColorComponentFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags
                                         operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ColorComponentFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( ColorComponentFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ColorComponentFlagBits::eR )
      result += "R | ";
    if ( value & ColorComponentFlagBits::eG )
      result += "G | ";
    if ( value & ColorComponentFlagBits::eB )
      result += "B | ";
    if ( value & ColorComponentFlagBits::eA )
      result += "A | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
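
  // Usage sketch: ColorComponentFlags most often appears as the colorWriteMask of a blend
  // attachment state, usually with all four channels enabled:
  //
  //   vk::PipelineColorBlendAttachmentState blendAttachment;
  //   blendAttachment.blendEnable    = VK_FALSE;
  //   blendAttachment.colorWriteMask = vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG |
  //                                    vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA;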

  using CullModeFlags = Flags<CullModeFlagBits>;

  template <>
  struct FlagTraits<CullModeFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( CullModeFlagBits::eNone ) | VkFlags( CullModeFlagBits::eFront ) |
                 VkFlags( CullModeFlagBits::eBack ) | VkFlags( CullModeFlagBits::eFrontAndBack )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0,
                                                                  CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CullModeFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&(CullModeFlagBits bit0,
                                                                 CullModeFlagBits bit1)VULKAN_HPP_NOEXCEPT
  {
    return CullModeFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0,
                                                                  CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CullModeFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( CullModeFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( CullModeFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & CullModeFlagBits::eFront )
      result += "Front | ";
    if ( value & CullModeFlagBits::eBack )
      result += "Back | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  enum class PipelineColorBlendStateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags )
  {
    return "{}";
  }

  using PipelineCreateFlags = Flags<PipelineCreateFlagBits>;

  template <>
  struct FlagTraits<PipelineCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags =
        VkFlags( PipelineCreateFlagBits::eDisableOptimization ) | VkFlags( PipelineCreateFlagBits::eAllowDerivatives ) |
        VkFlags( PipelineCreateFlagBits::eDerivative ) | VkFlags( PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) |
        VkFlags( PipelineCreateFlagBits::eDispatchBase ) |
        VkFlags( PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) |
        VkFlags( PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) |
        VkFlags( PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) |
        VkFlags( PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) |
        VkFlags( PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) |
        VkFlags( PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) |
        VkFlags( PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) |
        VkFlags( PipelineCreateFlagBits::eDeferCompileNV ) | VkFlags( PipelineCreateFlagBits::eCaptureStatisticsKHR ) |
        VkFlags( PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) |
        VkFlags( PipelineCreateFlagBits::eIndirectBindableNV ) | VkFlags( PipelineCreateFlagBits::eLibraryKHR ) |
        VkFlags( PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) |
        VkFlags( PipelineCreateFlagBits::eEarlyReturnOnFailureEXT )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags
                                         operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&( PipelineCreateFlagBits bit0,
                                                                        PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags
                                         operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( PipelineCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PipelineCreateFlagBits::eDisableOptimization )
      result += "DisableOptimization | ";
    if ( value & PipelineCreateFlagBits::eAllowDerivatives )
      result += "AllowDerivatives | ";
    if ( value & PipelineCreateFlagBits::eDerivative )
      result += "Derivative | ";
    if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex )
      result += "ViewIndexFromDeviceIndex | ";
    if ( value & PipelineCreateFlagBits::eDispatchBase )
      result += "DispatchBase | ";
    if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR )
      result += "RayTracingNoNullAnyHitShadersKHR | ";
    if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR )
      result += "RayTracingNoNullClosestHitShadersKHR | ";
    if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR )
      result += "RayTracingNoNullMissShadersKHR | ";
    if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR )
      result += "RayTracingNoNullIntersectionShadersKHR | ";
    if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR )
      result += "RayTracingSkipTrianglesKHR | ";
    if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR )
      result += "RayTracingSkipAabbsKHR | ";
    if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR )
      result += "RayTracingShaderGroupHandleCaptureReplayKHR | ";
    if ( value & PipelineCreateFlagBits::eDeferCompileNV )
      result += "DeferCompileNV | ";
    if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR )
      result += "CaptureStatisticsKHR | ";
    if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR )
      result += "CaptureInternalRepresentationsKHR | ";
    if ( value & PipelineCreateFlagBits::eIndirectBindableNV )
      result += "IndirectBindableNV | ";
    if ( value & PipelineCreateFlagBits::eLibraryKHR )
      result += "LibraryKHR | ";
    if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT )
      result += "FailOnPipelineCompileRequiredEXT | ";
    if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT )
      result += "EarlyReturnOnFailureEXT | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
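
  // Illustrative, non-normative sketch: operator~ above yields the complement as a
  // PipelineCreateFlags value, so a single bit can be cleared from a mask:
  //
  //   vk::PipelineCreateFlags flags = vk::PipelineCreateFlagBits::eDisableOptimization |
  //                                   vk::PipelineCreateFlagBits::eAllowDerivatives;
  //   flags &= ~vk::PipelineCreateFlagBits::eAllowDerivatives;  // eDisableOptimization remains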

  enum class PipelineDepthStencilStateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags )
  {
    return "{}";
  }

  enum class PipelineDynamicStateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags )
  {
    return "{}";
  }

  enum class PipelineInputAssemblyStateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags )
  {
    return "{}";
  }

  enum class PipelineLayoutCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags )
  {
    return "{}";
  }

  enum class PipelineMultisampleStateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags )
  {
    return "{}";
  }

  enum class PipelineRasterizationStateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags )
  {
    return "{}";
  }

  using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits>;

  template <>
  struct FlagTraits<PipelineShaderStageCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) |
                 VkFlags( PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags
                                         operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineShaderStageCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags
                                         operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineShaderStageCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags
                                         operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineShaderStageCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags
                                         operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( PipelineShaderStageCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT )
      result += "AllowVaryingSubgroupSizeEXT | ";
    if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT )
      result += "RequireFullSubgroupsEXT | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
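
  // Note (non-normative): FlagTraits<>::allFlags is what keeps operator~ well defined; the
  // complement is taken relative to the set of known bits, so for example:
  //
  //   vk::PipelineShaderStageCreateFlags none =
  //     ~( vk::PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT |
  //        vk::PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT );
  //   // 'none' is empty, so vk::to_string( none ) == "{}"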

  enum class PipelineTessellationStateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags )
  {
    return "{}";
  }

  enum class PipelineVertexInputStateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags )
  {
    return "{}";
  }

  enum class PipelineViewportStateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
  {
    return "(void)";
  }

  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags )
  {
    return "{}";
  }

  using ShaderStageFlags = Flags<ShaderStageFlagBits>;

  template <>
  struct FlagTraits<ShaderStageFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ShaderStageFlagBits::eVertex ) | VkFlags( ShaderStageFlagBits::eTessellationControl ) |
                 VkFlags( ShaderStageFlagBits::eTessellationEvaluation ) | VkFlags( ShaderStageFlagBits::eGeometry ) |
                 VkFlags( ShaderStageFlagBits::eFragment ) | VkFlags( ShaderStageFlagBits::eCompute ) |
                 VkFlags( ShaderStageFlagBits::eAllGraphics ) | VkFlags( ShaderStageFlagBits::eAll ) |
                 VkFlags( ShaderStageFlagBits::eRaygenKHR ) | VkFlags( ShaderStageFlagBits::eAnyHitKHR ) |
                 VkFlags( ShaderStageFlagBits::eClosestHitKHR ) | VkFlags( ShaderStageFlagBits::eMissKHR ) |
                 VkFlags( ShaderStageFlagBits::eIntersectionKHR ) | VkFlags( ShaderStageFlagBits::eCallableKHR ) |
                 VkFlags( ShaderStageFlagBits::eTaskNV ) | VkFlags( ShaderStageFlagBits::eMeshNV )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0,
                                                                     ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ShaderStageFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0,
                                                                     ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ShaderStageFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0,
                                                                     ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ShaderStageFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( ShaderStageFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ShaderStageFlagBits::eVertex )
      result += "Vertex | ";
    if ( value & ShaderStageFlagBits::eTessellationControl )
      result += "TessellationControl | ";
    if ( value & ShaderStageFlagBits::eTessellationEvaluation )
      result += "TessellationEvaluation | ";
    if ( value & ShaderStageFlagBits::eGeometry )
      result += "Geometry | ";
    if ( value & ShaderStageFlagBits::eFragment )
      result += "Fragment | ";
    if ( value & ShaderStageFlagBits::eCompute )
      result += "Compute | ";
    if ( value & ShaderStageFlagBits::eRaygenKHR )
      result += "RaygenKHR | ";
    if ( value & ShaderStageFlagBits::eAnyHitKHR )
      result += "AnyHitKHR | ";
    if ( value & ShaderStageFlagBits::eClosestHitKHR )
      result += "ClosestHitKHR | ";
    if ( value & ShaderStageFlagBits::eMissKHR )
      result += "MissKHR | ";
    if ( value & ShaderStageFlagBits::eIntersectionKHR )
      result += "IntersectionKHR | ";
    if ( value & ShaderStageFlagBits::eCallableKHR )
      result += "CallableKHR | ";
    if ( value & ShaderStageFlagBits::eTaskNV )
      result += "TaskNV | ";
    if ( value & ShaderStageFlagBits::eMeshNV )
      result += "MeshNV | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
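
  // Illustrative, non-normative sketch: ShaderStageFlags is typically consumed as a stage
  // visibility mask, e.g. DescriptorSetLayoutBinding::stageFlags:
  //
  //   vk::ShaderStageFlags stages = vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment;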

  using SamplerCreateFlags = Flags<SamplerCreateFlagBits>;

  template <>
  struct FlagTraits<SamplerCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SamplerCreateFlagBits::eSubsampledEXT ) |
                 VkFlags( SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0,
                                                                       SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SamplerCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0,
                                                                       SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SamplerCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0,
                                                                       SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SamplerCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( SamplerCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SamplerCreateFlagBits::eSubsampledEXT )
      result += "SubsampledEXT | ";
    if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT )
      result += "SubsampledCoarseReconstructionEXT | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits>;

  template <>
  struct FlagTraits<DescriptorPoolCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) |
                 VkFlags( DescriptorPoolCreateFlagBits::eUpdateAfterBind ) |
                 VkFlags( DescriptorPoolCreateFlagBits::eHostOnlyVALVE )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags
                                         operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DescriptorPoolCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags
                                         operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DescriptorPoolCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags
                                         operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DescriptorPoolCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( DescriptorPoolCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet )
      result += "FreeDescriptorSet | ";
    if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind )
      result += "UpdateAfterBind | ";
    if ( value & DescriptorPoolCreateFlagBits::eHostOnlyVALVE )
      result += "HostOnlyVALVE | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  enum class DescriptorPoolResetFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
  {
    return "(void)";
  }

  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits>;

  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags )
  {
    return "{}";
  }

  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits>;

  template <>
  struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) |
                 VkFlags( DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) |
                 VkFlags( DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags
                                         operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags
                                         operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DescriptorSetLayoutCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags
                                         operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags
                                         operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( DescriptorSetLayoutCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool )
      result += "UpdateAfterBindPool | ";
    if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR )
      result += "PushDescriptorKHR | ";
    if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE )
      result += "HostOnlyPoolVALVE | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using AccessFlags = Flags<AccessFlagBits>;

  template <>
  struct FlagTraits<AccessFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( AccessFlagBits::eIndirectCommandRead ) | VkFlags( AccessFlagBits::eIndexRead ) |
                 VkFlags( AccessFlagBits::eVertexAttributeRead ) | VkFlags( AccessFlagBits::eUniformRead ) |
                 VkFlags( AccessFlagBits::eInputAttachmentRead ) | VkFlags( AccessFlagBits::eShaderRead ) |
                 VkFlags( AccessFlagBits::eShaderWrite ) | VkFlags( AccessFlagBits::eColorAttachmentRead ) |
                 VkFlags( AccessFlagBits::eColorAttachmentWrite ) |
                 VkFlags( AccessFlagBits::eDepthStencilAttachmentRead ) |
                 VkFlags( AccessFlagBits::eDepthStencilAttachmentWrite ) | VkFlags( AccessFlagBits::eTransferRead ) |
                 VkFlags( AccessFlagBits::eTransferWrite ) | VkFlags( AccessFlagBits::eHostRead ) |
                 VkFlags( AccessFlagBits::eHostWrite ) | VkFlags( AccessFlagBits::eMemoryRead ) |
                 VkFlags( AccessFlagBits::eMemoryWrite ) | VkFlags( AccessFlagBits::eTransformFeedbackWriteEXT ) |
                 VkFlags( AccessFlagBits::eTransformFeedbackCounterReadEXT ) |
                 VkFlags( AccessFlagBits::eTransformFeedbackCounterWriteEXT ) |
                 VkFlags( AccessFlagBits::eConditionalRenderingReadEXT ) |
                 VkFlags( AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) |
                 VkFlags( AccessFlagBits::eAccelerationStructureReadKHR ) |
                 VkFlags( AccessFlagBits::eAccelerationStructureWriteKHR ) |
                 VkFlags( AccessFlagBits::eFragmentDensityMapReadEXT ) |
                 VkFlags( AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) |
                 VkFlags( AccessFlagBits::eCommandPreprocessReadNV ) |
                 VkFlags( AccessFlagBits::eCommandPreprocessWriteNV ) | VkFlags( AccessFlagBits::eNoneKHR )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0,
                                                                AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AccessFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0,
                                                                AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AccessFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0,
                                                                AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AccessFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( AccessFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( AccessFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & AccessFlagBits::eIndirectCommandRead )
      result += "IndirectCommandRead | ";
    if ( value & AccessFlagBits::eIndexRead )
      result += "IndexRead | ";
    if ( value & AccessFlagBits::eVertexAttributeRead )
      result += "VertexAttributeRead | ";
    if ( value & AccessFlagBits::eUniformRead )
      result += "UniformRead | ";
    if ( value & AccessFlagBits::eInputAttachmentRead )
      result += "InputAttachmentRead | ";
    if ( value & AccessFlagBits::eShaderRead )
      result += "ShaderRead | ";
    if ( value & AccessFlagBits::eShaderWrite )
      result += "ShaderWrite | ";
    if ( value & AccessFlagBits::eColorAttachmentRead )
      result += "ColorAttachmentRead | ";
    if ( value & AccessFlagBits::eColorAttachmentWrite )
      result += "ColorAttachmentWrite | ";
    if ( value & AccessFlagBits::eDepthStencilAttachmentRead )
      result += "DepthStencilAttachmentRead | ";
    if ( value & AccessFlagBits::eDepthStencilAttachmentWrite )
      result += "DepthStencilAttachmentWrite | ";
    if ( value & AccessFlagBits::eTransferRead )
      result += "TransferRead | ";
    if ( value & AccessFlagBits::eTransferWrite )
      result += "TransferWrite | ";
    if ( value & AccessFlagBits::eHostRead )
      result += "HostRead | ";
    if ( value & AccessFlagBits::eHostWrite )
      result += "HostWrite | ";
    if ( value & AccessFlagBits::eMemoryRead )
      result += "MemoryRead | ";
    if ( value & AccessFlagBits::eMemoryWrite )
      result += "MemoryWrite | ";
    if ( value & AccessFlagBits::eTransformFeedbackWriteEXT )
      result += "TransformFeedbackWriteEXT | ";
    if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT )
      result += "TransformFeedbackCounterReadEXT | ";
    if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT )
      result += "TransformFeedbackCounterWriteEXT | ";
    if ( value & AccessFlagBits::eConditionalRenderingReadEXT )
      result += "ConditionalRenderingReadEXT | ";
    if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT )
      result += "ColorAttachmentReadNoncoherentEXT | ";
    if ( value & AccessFlagBits::eAccelerationStructureReadKHR )
      result += "AccelerationStructureReadKHR | ";
    if ( value & AccessFlagBits::eAccelerationStructureWriteKHR )
      result += "AccelerationStructureWriteKHR | ";
    if ( value & AccessFlagBits::eFragmentDensityMapReadEXT )
      result += "FragmentDensityMapReadEXT | ";
    if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR )
      result += "FragmentShadingRateAttachmentReadKHR | ";
    if ( value & AccessFlagBits::eCommandPreprocessReadNV )
      result += "CommandPreprocessReadNV | ";
    if ( value & AccessFlagBits::eCommandPreprocessWriteNV )
      result += "CommandPreprocessWriteNV | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
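
  // Illustrative, non-normative sketch: AccessFlags values are usually paired as source and
  // destination access masks in barrier structures such as MemoryBarrier:
  //
  //   vk::MemoryBarrier barrier;
  //   barrier.srcAccessMask = vk::AccessFlagBits::eColorAttachmentWrite;
  //   barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead;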

  using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits>;

  template <>
  struct FlagTraits<AttachmentDescriptionFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( AttachmentDescriptionFlagBits::eMayAlias )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags
                                         operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AttachmentDescriptionFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags
                                         operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AttachmentDescriptionFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags
                                         operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AttachmentDescriptionFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( AttachmentDescriptionFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & AttachmentDescriptionFlagBits::eMayAlias )
      result += "MayAlias | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using DependencyFlags = Flags<DependencyFlagBits>;

  template <>
  struct FlagTraits<DependencyFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DependencyFlagBits::eByRegion ) | VkFlags( DependencyFlagBits::eDeviceGroup ) |
                 VkFlags( DependencyFlagBits::eViewLocal )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0,
                                                                    DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DependencyFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0,
                                                                    DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DependencyFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0,
                                                                    DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DependencyFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( DependencyFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DependencyFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DependencyFlagBits::eByRegion )
      result += "ByRegion | ";
    if ( value & DependencyFlagBits::eDeviceGroup )
      result += "DeviceGroup | ";
    if ( value & DependencyFlagBits::eViewLocal )
      result += "ViewLocal | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits>;

  template <>
  struct FlagTraits<FramebufferCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( FramebufferCreateFlagBits::eImageless )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags
                                         operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FramebufferCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags
                                         operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FramebufferCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags
                                         operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FramebufferCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( FramebufferCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & FramebufferCreateFlagBits::eImageless )
      result += "Imageless | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits>;

  template <>
  struct FlagTraits<RenderPassCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( RenderPassCreateFlagBits::eTransformQCOM )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags
                                         operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return RenderPassCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags
                                         operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return RenderPassCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags
                                         operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return RenderPassCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( RenderPassCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & RenderPassCreateFlagBits::eTransformQCOM )
      result += "TransformQCOM | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;

  template <>
  struct FlagTraits<SubpassDescriptionFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SubpassDescriptionFlagBits::ePerViewAttributesNVX ) |
                 VkFlags( SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) |
                 VkFlags( SubpassDescriptionFlagBits::eFragmentRegionQCOM ) |
                 VkFlags( SubpassDescriptionFlagBits::eShaderResolveQCOM )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags
                                         operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SubpassDescriptionFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags
                                         operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SubpassDescriptionFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags
                                         operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SubpassDescriptionFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( SubpassDescriptionFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX )
      result += "PerViewAttributesNVX | ";
    if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX )
      result += "PerViewPositionXOnlyNVX | ";
    if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM )
      result += "FragmentRegionQCOM | ";
    if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM )
      result += "ShaderResolveQCOM | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits>;

  template <>
  struct FlagTraits<CommandPoolCreateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( CommandPoolCreateFlagBits::eTransient ) |
                 VkFlags( CommandPoolCreateFlagBits::eResetCommandBuffer ) |
                 VkFlags( CommandPoolCreateFlagBits::eProtected )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags
                                         operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandPoolCreateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags
                                         operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandPoolCreateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags
                                         operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandPoolCreateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( CommandPoolCreateFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & CommandPoolCreateFlagBits::eTransient )
      result += "Transient | ";
    if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer )
      result += "ResetCommandBuffer | ";
    if ( value & CommandPoolCreateFlagBits::eProtected )
      result += "Protected | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits>;

  template <>
  struct FlagTraits<CommandPoolResetFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( CommandPoolResetFlagBits::eReleaseResources )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags
                                         operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandPoolResetFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags
                                         operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandPoolResetFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags
                                         operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandPoolResetFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( CommandPoolResetFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & CommandPoolResetFlagBits::eReleaseResources )
      result += "ReleaseResources | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits>;

  template <>
  struct FlagTraits<CommandBufferResetFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( CommandBufferResetFlagBits::eReleaseResources )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags
                                         operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandBufferResetFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags
                                         operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandBufferResetFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags
                                         operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandBufferResetFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( CommandBufferResetFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & CommandBufferResetFlagBits::eReleaseResources )
      result += "ReleaseResources | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits>;

  template <>
  struct FlagTraits<CommandBufferUsageFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( CommandBufferUsageFlagBits::eOneTimeSubmit ) |
                 VkFlags( CommandBufferUsageFlagBits::eRenderPassContinue ) |
                 VkFlags( CommandBufferUsageFlagBits::eSimultaneousUse )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags
                                         operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandBufferUsageFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags
                                         operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandBufferUsageFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags
                                         operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CommandBufferUsageFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( CommandBufferUsageFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit )
      result += "OneTimeSubmit | ";
    if ( value & CommandBufferUsageFlagBits::eRenderPassContinue )
      result += "RenderPassContinue | ";
    if ( value & CommandBufferUsageFlagBits::eSimultaneousUse )
      result += "SimultaneousUse | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
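
  // Illustrative, non-normative sketch: these usage flags are passed through
  // CommandBufferBeginInfo::flags:
  //
  //   vk::CommandBufferBeginInfo beginInfo;
  //   beginInfo.flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit;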

  using QueryControlFlags = Flags<QueryControlFlagBits>;

  template <>
  struct FlagTraits<QueryControlFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( QueryControlFlagBits::ePrecise )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0,
                                                                      QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return QueryControlFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0,
                                                                      QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return QueryControlFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0,
                                                                      QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return QueryControlFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( QueryControlFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & QueryControlFlagBits::ePrecise )
      result += "Precise | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using StencilFaceFlags = Flags<StencilFaceFlagBits>;

  template <>
  struct FlagTraits<StencilFaceFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( StencilFaceFlagBits::eFront ) | VkFlags( StencilFaceFlagBits::eBack ) |
                 VkFlags( StencilFaceFlagBits::eFrontAndBack )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0,
                                                                     StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return StencilFaceFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0,
                                                                     StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return StencilFaceFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0,
                                                                     StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return StencilFaceFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( StencilFaceFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & StencilFaceFlagBits::eFront )
      result += "Front | ";
    if ( value & StencilFaceFlagBits::eBack )
      result += "Back | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  //=== VK_VERSION_1_1 ===

  using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits>;

  template <>
  struct FlagTraits<SubgroupFeatureFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SubgroupFeatureFlagBits::eBasic ) | VkFlags( SubgroupFeatureFlagBits::eVote ) |
                 VkFlags( SubgroupFeatureFlagBits::eArithmetic ) | VkFlags( SubgroupFeatureFlagBits::eBallot ) |
                 VkFlags( SubgroupFeatureFlagBits::eShuffle ) | VkFlags( SubgroupFeatureFlagBits::eShuffleRelative ) |
                 VkFlags( SubgroupFeatureFlagBits::eClustered ) | VkFlags( SubgroupFeatureFlagBits::eQuad ) |
                 VkFlags( SubgroupFeatureFlagBits::ePartitionedNV )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags
                                         operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SubgroupFeatureFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&( SubgroupFeatureFlagBits bit0,
                                                                         SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SubgroupFeatureFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags
                                         operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SubgroupFeatureFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( SubgroupFeatureFlags( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SubgroupFeatureFlagBits::eBasic )
      result += "Basic | ";
    if ( value & SubgroupFeatureFlagBits::eVote )
      result += "Vote | ";
    if ( value & SubgroupFeatureFlagBits::eArithmetic )
      result += "Arithmetic | ";
    if ( value & SubgroupFeatureFlagBits::eBallot )
      result += "Ballot | ";
    if ( value & SubgroupFeatureFlagBits::eShuffle )
      result += "Shuffle | ";
    if ( value & SubgroupFeatureFlagBits::eShuffleRelative )
      result += "ShuffleRelative | ";
    if ( value & SubgroupFeatureFlagBits::eClustered )
      result += "Clustered | ";
    if ( value & SubgroupFeatureFlagBits::eQuad )
      result += "Quad | ";
    if ( value & SubgroupFeatureFlagBits::ePartitionedNV )
      result += "PartitionedNV | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
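
  // Illustrative, non-normative sketch: SubgroupFeatureFlags is reported by the implementation in
  // PhysicalDeviceSubgroupProperties::supportedOperations and can be queried like this:
  //
  //   vk::PhysicalDeviceSubgroupProperties subgroupProps = ...;  // obtained via getProperties2
  //   bool hasBallot = static_cast<bool>( subgroupProps.supportedOperations & vk::SubgroupFeatureFlagBits::eBallot );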

  using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits>;

  template <>
  struct FlagTraits<PeerMemoryFeatureFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( PeerMemoryFeatureFlagBits::eCopySrc ) | VkFlags( PeerMemoryFeatureFlagBits::eCopyDst ) |
                 VkFlags( PeerMemoryFeatureFlagBits::eGenericSrc ) | VkFlags( PeerMemoryFeatureFlagBits::eGenericDst )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags
                                         operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PeerMemoryFeatureFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags
                                         operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PeerMemoryFeatureFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags
                                         operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PeerMemoryFeatureFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( PeerMemoryFeatureFlags( bits ) );
  }

  using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;

  VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PeerMemoryFeatureFlagBits::eCopySrc )
      result += "CopySrc | ";
    if ( value & PeerMemoryFeatureFlagBits::eCopyDst )
      result += "CopyDst | ";
    if ( value & PeerMemoryFeatureFlagBits::eGenericSrc )
      result += "GenericSrc | ";
    if ( value & PeerMemoryFeatureFlagBits::eGenericDst )
      result += "GenericDst | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits>;

  template <>
  struct FlagTraits<MemoryAllocateFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( MemoryAllocateFlagBits::eDeviceMask ) | VkFlags( MemoryAllocateFlagBits::eDeviceAddress ) |
                 VkFlags( MemoryAllocateFlagBits::eDeviceAddressCaptureReplay )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags
                                         operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return MemoryAllocateFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&( MemoryAllocateFlagBits bit0,
                                                                        MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return MemoryAllocateFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags
                                         operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return MemoryAllocateFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( MemoryAllocateFlags( bits ) );
  }

  using MemoryAllocateFlagsKHR = MemoryAllocateFlags;

  VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & MemoryAllocateFlagBits::eDeviceMask )
      result += "DeviceMask | ";
    if ( value & MemoryAllocateFlagBits::eDeviceAddress )
      result += "DeviceAddress | ";
    if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay )
      result += "DeviceAddressCaptureReplay | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  enum class CommandPoolTrimFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits )
  {
    return "(void)";
  }

  using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits>;

  using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;

  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags )
  {
    return "{}";
  }

  enum class DescriptorUpdateTemplateCreateFlagBits : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
  {
    return "(void)";
  }

  using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits>;

  using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;

  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags )
  {
    return "{}";
  }

  using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits>;

  template <>
  struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBits::eDmaBufEXT )
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
                 | VkFlags( ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID )
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
                 | VkFlags( ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT )
#if defined( VK_USE_PLATFORM_FUCHSIA )
                 | VkFlags( ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA )
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
                                         operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryHandleTypeFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
                                         operator&( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryHandleTypeFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
                                         operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags
                                         operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( ExternalMemoryHandleTypeFlags( bits ) );
  }

  using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd )
      result += "OpaqueFd | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 )
      result += "OpaqueWin32 | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt )
      result += "OpaqueWin32Kmt | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture )
      result += "D3D11Texture | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt )
      result += "D3D11TextureKmt | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap )
      result += "D3D12Heap | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource )
      result += "D3D12Resource | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT )
      result += "DmaBufEXT | ";
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID )
      result += "AndroidHardwareBufferANDROID | ";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
    if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT )
      result += "HostAllocationEXT | ";
    if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT )
      result += "HostMappedForeignMemoryEXT | ";
#if defined( VK_USE_PLATFORM_FUCHSIA )
    if ( value & ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA )
      result += "ZirconVmoFUCHSIA | ";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
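
  // Note (non-normative): the Android hardware buffer and Fuchsia VMO handle-type bits above only
  // contribute to allFlags and to_string when the corresponding VK_USE_PLATFORM_* macro is
  // defined, mirroring the platform guards in the C headers.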

  using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;

  template <>
  struct FlagTraits<ExternalMemoryFeatureFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ExternalMemoryFeatureFlagBits::eDedicatedOnly ) |
                 VkFlags( ExternalMemoryFeatureFlagBits::eExportable ) |
                 VkFlags( ExternalMemoryFeatureFlagBits::eImportable )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags
                                         operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryFeatureFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags
                                         operator&( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryFeatureFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags
                                         operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryFeatureFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( ExternalMemoryFeatureFlags( bits ) );
  }

  using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly )
      result += "DedicatedOnly | ";
    if ( value & ExternalMemoryFeatureFlagBits::eExportable )
      result += "Exportable | ";
    if ( value & ExternalMemoryFeatureFlagBits::eImportable )
      result += "Importable | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;

  template <>
  struct FlagTraits<ExternalFenceHandleTypeFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueFd ) |
                 VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) |
                 VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) |
                 VkFlags( ExternalFenceHandleTypeFlagBits::eSyncFd )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags
                                         operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags
                                         operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalFenceHandleTypeFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags
                                         operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( ExternalFenceHandleTypeFlags( bits ) );
  }

  using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;

  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd )
      result += "OpaqueFd | ";
    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 )
      result += "OpaqueWin32 | ";
    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt )
      result += "OpaqueWin32Kmt | ";
    if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd )
      result += "SyncFd | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;

  template <>
  struct FlagTraits<ExternalFenceFeatureFlagBits>
  {
    enum : VkFlags
    {
      allFlags =
        VkFlags( ExternalFenceFeatureFlagBits::eExportable ) | VkFlags( ExternalFenceFeatureFlagBits::eImportable )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags
                                         operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalFenceFeatureFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags
                                         operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalFenceFeatureFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags
                                         operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalFenceFeatureFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( ExternalFenceFeatureFlags( bits ) );
  }

  using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;

  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalFenceFeatureFlagBits::eExportable )
      result += "Exportable | ";
    if ( value & ExternalFenceFeatureFlagBits::eImportable )
      result += "Importable | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using FenceImportFlags = Flags<FenceImportFlagBits>;

  template <>
  struct FlagTraits<FenceImportFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( FenceImportFlagBits::eTemporary )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0,
                                                                     FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FenceImportFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0,
                                                                     FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FenceImportFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0,
                                                                     FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return FenceImportFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator~( FenceImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( FenceImportFlags( bits ) );
  }

  using FenceImportFlagsKHR = FenceImportFlags;

  VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & FenceImportFlagBits::eTemporary )
      result += "Temporary | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits>;

  template <>
  struct FlagTraits<SemaphoreImportFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SemaphoreImportFlagBits::eTemporary )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
                                         operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SemaphoreImportFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&( SemaphoreImportFlagBits bit0,
                                                                         SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SemaphoreImportFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags
                                         operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SemaphoreImportFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( SemaphoreImportFlags( bits ) );
  }

  using SemaphoreImportFlagsKHR = SemaphoreImportFlags;

  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SemaphoreImportFlagBits::eTemporary )
      result += "Temporary | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;

  template <>
  struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) |
                 VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) |
                 VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) |
                 VkFlags( ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) |
                 VkFlags( ExternalSemaphoreHandleTypeFlagBits::eSyncFd )
#if defined( VK_USE_PLATFORM_FUCHSIA )
                 | VkFlags( ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA )
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
                                         operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
                                         operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
                                         operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags
                                         operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
  }

  using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;

  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
      result += "OpaqueFd | ";
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 )
      result += "OpaqueWin32 | ";
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt )
      result += "OpaqueWin32Kmt | ";
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence )
      result += "D3D12Fence | ";
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd )
      result += "SyncFd | ";
#if defined( VK_USE_PLATFORM_FUCHSIA )
    if ( value & ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA )
      result += "ZirconEventFUCHSIA | ";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits>;

  template <>
  struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ExternalSemaphoreFeatureFlagBits::eExportable ) |
                 VkFlags( ExternalSemaphoreFeatureFlagBits::eImportable )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags
                                         operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalSemaphoreFeatureFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags
                                         operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalSemaphoreFeatureFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags
                                         operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags
                                         operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( ExternalSemaphoreFeatureFlags( bits ) );
  }

  using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;

  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalSemaphoreFeatureFlagBits::eExportable )
      result += "Exportable | ";
    if ( value & ExternalSemaphoreFeatureFlagBits::eImportable )
      result += "Importable | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  //=== VK_VERSION_1_2 ===

  using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits>;

  template <>
  struct FlagTraits<DescriptorBindingFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DescriptorBindingFlagBits::eUpdateAfterBind ) |
                 VkFlags( DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) |
                 VkFlags( DescriptorBindingFlagBits::ePartiallyBound ) |
                 VkFlags( DescriptorBindingFlagBits::eVariableDescriptorCount )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags
                                         operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DescriptorBindingFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags
                                         operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DescriptorBindingFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags
                                         operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DescriptorBindingFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( DescriptorBindingFlags( bits ) );
  }

  using DescriptorBindingFlagsEXT = DescriptorBindingFlags;

  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DescriptorBindingFlagBits::eUpdateAfterBind )
      result += "UpdateAfterBind | ";
    if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending )
      result += "UpdateUnusedWhilePending | ";
    if ( value & DescriptorBindingFlagBits::ePartiallyBound )
      result += "PartiallyBound | ";
    if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount )
      result += "VariableDescriptorCount | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
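
  // Illustrative usage sketch: descriptor binding flags are typically OR-ed per binding and
  // handed to DescriptorSetLayoutBindingFlagsCreateInfo, which is defined elsewhere in this
  // header; for example:
  //
  //   VULKAN_HPP_NAMESPACE::DescriptorBindingFlags bindingFlags =
  //     VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits::eUpdateAfterBind |
  //     VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits::ePartiallyBound;
  //   // to_string( bindingFlags ) == "{ UpdateAfterBind | PartiallyBound }"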

  using ResolveModeFlags = Flags<ResolveModeFlagBits>;

  template <>
  struct FlagTraits<ResolveModeFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ResolveModeFlagBits::eNone ) | VkFlags( ResolveModeFlagBits::eSampleZero ) |
                 VkFlags( ResolveModeFlagBits::eAverage ) | VkFlags( ResolveModeFlagBits::eMin ) |
                 VkFlags( ResolveModeFlagBits::eMax )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0,
                                                                     ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ResolveModeFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0,
                                                                     ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ResolveModeFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0,
                                                                     ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ResolveModeFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator~( ResolveModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( ResolveModeFlags( bits ) );
  }

  using ResolveModeFlagsKHR = ResolveModeFlags;

  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ResolveModeFlagBits::eSampleZero )
      result += "SampleZero | ";
    if ( value & ResolveModeFlagBits::eAverage )
      result += "Average | ";
    if ( value & ResolveModeFlagBits::eMin )
      result += "Min | ";
    if ( value & ResolveModeFlagBits::eMax )
      result += "Max | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits>;

  template <>
  struct FlagTraits<SemaphoreWaitFlagBits>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SemaphoreWaitFlagBits::eAny )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0,
                                                                       SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SemaphoreWaitFlags( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0,
                                                                       SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SemaphoreWaitFlags( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0,
                                                                       SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SemaphoreWaitFlags( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator~( SemaphoreWaitFlagBits bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( SemaphoreWaitFlags( bits ) );
  }

  using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags;

  VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SemaphoreWaitFlagBits::eAny )
      result += "Any | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
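
  // Note: as with every to_string overload in this section, an empty mask stringifies to "{}",
  // while a non-empty mask lists its set bits; for example:
  //
  //   VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags noFlags;  // default-constructed, no bits set
  //   // to_string( noFlags ) == "{}"
  //   // to_string( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags(
  //   //   VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBits::eAny ) ) == "{ Any }"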

  //=== VK_KHR_surface ===

  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR>;

  template <>
  struct FlagTraits<CompositeAlphaFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( CompositeAlphaFlagBitsKHR::eOpaque ) | VkFlags( CompositeAlphaFlagBitsKHR::ePreMultiplied ) |
                 VkFlags( CompositeAlphaFlagBitsKHR::ePostMultiplied ) | VkFlags( CompositeAlphaFlagBitsKHR::eInherit )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR
                                         operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CompositeAlphaFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR
                                         operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CompositeAlphaFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR
                                         operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return CompositeAlphaFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( CompositeAlphaFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & CompositeAlphaFlagBitsKHR::eOpaque )
      result += "Opaque | ";
    if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied )
      result += "PreMultiplied | ";
    if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied )
      result += "PostMultiplied | ";
    if ( value & CompositeAlphaFlagBitsKHR::eInherit )
      result += "Inherit | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
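
  // Illustrative usage sketch: swapchain creation usually selects a composite-alpha bit reported
  // in SurfaceCapabilitiesKHR::supportedCompositeAlpha (the struct is defined elsewhere in this
  // header); 'surfaceCapabilities' below is a hypothetical variable queried beforehand:
  //
  //   VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha =
  //     ( surfaceCapabilities.supportedCompositeAlpha & VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque )
  //       ? VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque
  //       : VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eInherit;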

  //=== VK_KHR_swapchain ===

  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;

  template <>
  struct FlagTraits<SwapchainCreateFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) |
                 VkFlags( SwapchainCreateFlagBitsKHR::eProtected ) |
                 VkFlags( SwapchainCreateFlagBitsKHR::eMutableFormat )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR
                                         operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SwapchainCreateFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR
                                         operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SwapchainCreateFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR
                                         operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SwapchainCreateFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( SwapchainCreateFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions )
      result += "SplitInstanceBindRegions | ";
    if ( value & SwapchainCreateFlagBitsKHR::eProtected )
      result += "Protected | ";
    if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat )
      result += "MutableFormat | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;

  template <>
  struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocal ) |
                 VkFlags( DeviceGroupPresentModeFlagBitsKHR::eRemote ) |
                 VkFlags( DeviceGroupPresentModeFlagBitsKHR::eSum ) |
                 VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR
                                         operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR
                                         operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR
                                         operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR
                                         operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal )
      result += "Local | ";
    if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote )
      result += "Remote | ";
    if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum )
      result += "Sum | ";
    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice )
      result += "LocalMultiDevice | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  //=== VK_KHR_display ===

  enum class DisplayModeCreateFlagBitsKHR : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
  {
    return "(void)";
  }

  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR )
  {
    return "{}";
  }

  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;

  template <>
  struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DisplayPlaneAlphaFlagBitsKHR::eOpaque ) | VkFlags( DisplayPlaneAlphaFlagBitsKHR::eGlobal ) |
                 VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) |
                 VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR
                                         operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR
                                         operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR
                                         operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque )
      result += "Opaque | ";
    if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal )
      result += "Global | ";
    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel )
      result += "PerPixel | ";
    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied )
      result += "PerPixelPremultiplied | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
  {
    return "(void)";
  }

  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR )
  {
    return "{}";
  }

  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;

  template <>
  struct FlagTraits<SurfaceTransformFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SurfaceTransformFlagBitsKHR::eIdentity ) | VkFlags( SurfaceTransformFlagBitsKHR::eRotate90 ) |
                 VkFlags( SurfaceTransformFlagBitsKHR::eRotate180 ) |
                 VkFlags( SurfaceTransformFlagBitsKHR::eRotate270 ) |
                 VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirror ) |
                 VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) |
                 VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) |
                 VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) |
                 VkFlags( SurfaceTransformFlagBitsKHR::eInherit )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR
                                         operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SurfaceTransformFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR
                                         operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SurfaceTransformFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR
                                         operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SurfaceTransformFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( SurfaceTransformFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SurfaceTransformFlagBitsKHR::eIdentity )
      result += "Identity | ";
    if ( value & SurfaceTransformFlagBitsKHR::eRotate90 )
      result += "Rotate90 | ";
    if ( value & SurfaceTransformFlagBitsKHR::eRotate180 )
      result += "Rotate180 | ";
    if ( value & SurfaceTransformFlagBitsKHR::eRotate270 )
      result += "Rotate270 | ";
    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror )
      result += "HorizontalMirror | ";
    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 )
      result += "HorizontalMirrorRotate90 | ";
    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 )
      result += "HorizontalMirrorRotate180 | ";
    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 )
      result += "HorizontalMirrorRotate270 | ";
    if ( value & SurfaceTransformFlagBitsKHR::eInherit )
      result += "Inherit | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
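
  // Illustrative usage sketch: the pre-transform of a swapchain is normally taken from
  // SurfaceCapabilitiesKHR (defined elsewhere in this header), preferring the identity transform
  // when the surface supports it; 'surfaceCapabilities' is a hypothetical variable:
  //
  //   VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform =
  //     ( surfaceCapabilities.supportedTransforms & VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity )
  //       ? VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity
  //       : surfaceCapabilities.currentTransform;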

#if defined( VK_USE_PLATFORM_XLIB_KHR )
  //=== VK_KHR_xlib_surface ===

  enum class XlibSurfaceCreateFlagBitsKHR : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR )
  {
    return "(void)";
  }

  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_XLIB_KHR*/

#if defined( VK_USE_PLATFORM_XCB_KHR )
  //=== VK_KHR_xcb_surface ===

  enum class XcbSurfaceCreateFlagBitsKHR : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR )
  {
    return "(void)";
  }

  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_XCB_KHR*/

#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  //=== VK_KHR_wayland_surface ===

  enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR )
  {
    return "(void)";
  }

  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  //=== VK_KHR_android_surface ===

  enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
  {
    return "(void)";
  }

  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_win32_surface ===

  enum class Win32SurfaceCreateFlagBitsKHR : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR )
  {
    return "(void)";
  }

  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_EXT_debug_report ===

  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT>;

  template <>
  struct FlagTraits<DebugReportFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DebugReportFlagBitsEXT::eInformation ) | VkFlags( DebugReportFlagBitsEXT::eWarning ) |
                 VkFlags( DebugReportFlagBitsEXT::ePerformanceWarning ) | VkFlags( DebugReportFlagBitsEXT::eError ) |
                 VkFlags( DebugReportFlagBitsEXT::eDebug )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT
                                         operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DebugReportFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&( DebugReportFlagBitsEXT bit0,
                                                                        DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DebugReportFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT
                                         operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DebugReportFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( DebugReportFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DebugReportFlagBitsEXT::eInformation )
      result += "Information | ";
    if ( value & DebugReportFlagBitsEXT::eWarning )
      result += "Warning | ";
    if ( value & DebugReportFlagBitsEXT::ePerformanceWarning )
      result += "PerformanceWarning | ";
    if ( value & DebugReportFlagBitsEXT::eError )
      result += "Error | ";
    if ( value & DebugReportFlagBitsEXT::eDebug )
      result += "Debug | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
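
  // Illustrative usage sketch: debug-report bits are usually OR-ed together for
  // DebugReportCallbackCreateInfoEXT::flags (the struct is defined elsewhere in this header);
  // for example:
  //
  //   VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT reportFlags =
  //     VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT::eWarning |
  //     VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT::ePerformanceWarning |
  //     VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT::eError;
  //   // to_string( reportFlags ) == "{ Warning | PerformanceWarning | Error }"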

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_queue ===

  using VideoCodecOperationFlagsKHR = Flags<VideoCodecOperationFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoCodecOperationFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoCodecOperationFlagBitsKHR::eInvalid )
#  if defined( VK_ENABLE_BETA_EXTENSIONS )
                 | VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) |
                 VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) |
                 VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH265EXT )
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR
                                         operator|( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCodecOperationFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR
                                         operator&( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCodecOperationFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR
                                         operator^( VideoCodecOperationFlagBitsKHR bit0, VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCodecOperationFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR operator~( VideoCodecOperationFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoCodecOperationFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
#  if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264EXT )
      result += "EncodeH264EXT | ";
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/
#  if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264EXT )
      result += "DecodeH264EXT | ";
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/
#  if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265EXT )
      result += "DecodeH265EXT | ";
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoChromaSubsamplingFlagsKHR = Flags<VideoChromaSubsamplingFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoChromaSubsamplingFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoChromaSubsamplingFlagBitsKHR::eInvalid ) |
                 VkFlags( VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) |
                 VkFlags( VideoChromaSubsamplingFlagBitsKHR::e420 ) |
                 VkFlags( VideoChromaSubsamplingFlagBitsKHR::e422 ) | VkFlags( VideoChromaSubsamplingFlagBitsKHR::e444 )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR
                                         operator|( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoChromaSubsamplingFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR
                                         operator&( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoChromaSubsamplingFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR
                                         operator^( VideoChromaSubsamplingFlagBitsKHR bit0, VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoChromaSubsamplingFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR
                                         operator~( VideoChromaSubsamplingFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoChromaSubsamplingFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome )
      result += "Monochrome | ";
    if ( value & VideoChromaSubsamplingFlagBitsKHR::e420 )
      result += "420 | ";
    if ( value & VideoChromaSubsamplingFlagBitsKHR::e422 )
      result += "422 | ";
    if ( value & VideoChromaSubsamplingFlagBitsKHR::e444 )
      result += "444 | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoComponentBitDepthFlagsKHR = Flags<VideoComponentBitDepthFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoComponentBitDepthFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoComponentBitDepthFlagBitsKHR::eInvalid ) |
                 VkFlags( VideoComponentBitDepthFlagBitsKHR::e8 ) | VkFlags( VideoComponentBitDepthFlagBitsKHR::e10 ) |
                 VkFlags( VideoComponentBitDepthFlagBitsKHR::e12 )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR
                                         operator|( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoComponentBitDepthFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR
                                         operator&( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoComponentBitDepthFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR
                                         operator^( VideoComponentBitDepthFlagBitsKHR bit0, VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoComponentBitDepthFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR
                                         operator~( VideoComponentBitDepthFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoComponentBitDepthFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoComponentBitDepthFlagBitsKHR::e8 )
      result += "8 | ";
    if ( value & VideoComponentBitDepthFlagBitsKHR::e10 )
      result += "10 | ";
    if ( value & VideoComponentBitDepthFlagBitsKHR::e12 )
      result += "12 | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoCapabilitiesFlagsKHR = Flags<VideoCapabilitiesFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoCapabilitiesFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoCapabilitiesFlagBitsKHR::eProtectedContent ) |
                 VkFlags( VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR
                                         operator|( VideoCapabilitiesFlagBitsKHR bit0, VideoCapabilitiesFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCapabilitiesFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR
                                         operator&( VideoCapabilitiesFlagBitsKHR bit0, VideoCapabilitiesFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCapabilitiesFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR
                                         operator^( VideoCapabilitiesFlagBitsKHR bit0, VideoCapabilitiesFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCapabilitiesFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilitiesFlagsKHR operator~( VideoCapabilitiesFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoCapabilitiesFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoCapabilitiesFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoCapabilitiesFlagBitsKHR::eProtectedContent )
      result += "ProtectedContent | ";
    if ( value & VideoCapabilitiesFlagBitsKHR::eSeparateReferenceImages )
      result += "SeparateReferenceImages | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoSessionCreateFlagsKHR = Flags<VideoSessionCreateFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoSessionCreateFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags =
        VkFlags( VideoSessionCreateFlagBitsKHR::eDefault ) | VkFlags( VideoSessionCreateFlagBitsKHR::eProtectedContent )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR
                                         operator|( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoSessionCreateFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR
                                         operator&( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoSessionCreateFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR
                                         operator^( VideoSessionCreateFlagBitsKHR bit0, VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoSessionCreateFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR operator~( VideoSessionCreateFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoSessionCreateFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoSessionCreateFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent )
      result += "ProtectedContent | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  enum class VideoBeginCodingFlagBitsKHR : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR )
  {
    return "(void)";
  }

  using VideoBeginCodingFlagsKHR = Flags<VideoBeginCodingFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR )
  {
    return "{}";
  }

  enum class VideoEndCodingFlagBitsKHR : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagBitsKHR )
  {
    return "(void)";
  }

  using VideoEndCodingFlagsKHR = Flags<VideoEndCodingFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( VideoEndCodingFlagsKHR )
  {
    return "{}";
  }

  using VideoCodingControlFlagsKHR = Flags<VideoCodingControlFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoCodingControlFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoCodingControlFlagBitsKHR::eDefault ) | VkFlags( VideoCodingControlFlagBitsKHR::eReset )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR
                                         operator|( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCodingControlFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR
                                         operator&( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCodingControlFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR
                                         operator^( VideoCodingControlFlagBitsKHR bit0, VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCodingControlFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR operator~( VideoCodingControlFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoCodingControlFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoCodingControlFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoCodingControlFlagBitsKHR::eReset )
      result += "Reset | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoCodingQualityPresetFlagsKHR = Flags<VideoCodingQualityPresetFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoCodingQualityPresetFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoCodingQualityPresetFlagBitsKHR::eDefault ) |
                 VkFlags( VideoCodingQualityPresetFlagBitsKHR::eNormal ) |
                 VkFlags( VideoCodingQualityPresetFlagBitsKHR::ePower ) |
                 VkFlags( VideoCodingQualityPresetFlagBitsKHR::eQuality )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR
                                         operator|( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCodingQualityPresetFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR
                                         operator&( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCodingQualityPresetFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR
                                         operator^( VideoCodingQualityPresetFlagBitsKHR bit0, VideoCodingQualityPresetFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoCodingQualityPresetFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingQualityPresetFlagsKHR
                                         operator~( VideoCodingQualityPresetFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoCodingQualityPresetFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoCodingQualityPresetFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoCodingQualityPresetFlagBitsKHR::eNormal )
      result += "Normal | ";
    if ( value & VideoCodingQualityPresetFlagBitsKHR::ePower )
      result += "Power | ";
    if ( value & VideoCodingQualityPresetFlagBitsKHR::eQuality )
      result += "Quality | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_decode_queue ===

  using VideoDecodeFlagsKHR = Flags<VideoDecodeFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoDecodeFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoDecodeFlagBitsKHR::eDefault ) | VkFlags( VideoDecodeFlagBitsKHR::eReserved0 )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR
                                         operator|( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoDecodeFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR operator&( VideoDecodeFlagBitsKHR bit0,
                                                                        VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoDecodeFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR
                                         operator^( VideoDecodeFlagBitsKHR bit0, VideoDecodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoDecodeFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeFlagsKHR operator~( VideoDecodeFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoDecodeFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoDecodeFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoDecodeFlagBitsKHR::eReserved0 )
      result += "Reserved0 | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_EXT_transform_feedback ===

  enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT )
  {
    return "(void)";
  }

  using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT )
  {
    return "{}";
  }

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_EXT_video_encode_h264 ===

  using VideoEncodeH264CapabilitiesFlagsEXT = Flags<VideoEncodeH264CapabilitiesFlagBitsEXT>;

  template <>
  struct FlagTraits<VideoEncodeH264CapabilitiesFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags =
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityWeightedBiPredImplicit ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityTransform8X8 ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityChromaQpOffset ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilitySecondChromaQpOffset ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterDisabled ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterEnabled ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterPartial ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityMultipleSlicePerFrame ) |
        VkFlags( VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT operator|(
    VideoEncodeH264CapabilitiesFlagBitsEXT bit0, VideoEncodeH264CapabilitiesFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264CapabilitiesFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT operator&(
    VideoEncodeH264CapabilitiesFlagBitsEXT bit0, VideoEncodeH264CapabilitiesFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264CapabilitiesFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT operator^(
    VideoEncodeH264CapabilitiesFlagBitsEXT bit0, VideoEncodeH264CapabilitiesFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264CapabilitiesFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesFlagsEXT
                                         operator~( VideoEncodeH264CapabilitiesFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoEncodeH264CapabilitiesFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilitiesFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCabac )
      result += "VkVideoEncodeH264CapabilityCabac | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityCavlc )
      result += "VkVideoEncodeH264CapabilityCavlc | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityWeightedBiPredImplicit )
      result += "VkVideoEncodeH264CapabilityWeightedBiPredImplicit | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityTransform8X8 )
      result += "VkVideoEncodeH264CapabilityTransform8X8 | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityChromaQpOffset )
      result += "VkVideoEncodeH264CapabilityChromaQpOffset | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilitySecondChromaQpOffset )
      result += "VkVideoEncodeH264CapabilitySecondChromaQpOffset | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterDisabled )
      result += "VkVideoEncodeH264CapabilityDeblockingFilterDisabled | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterEnabled )
      result += "VkVideoEncodeH264CapabilityDeblockingFilterEnabled | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityDeblockingFilterPartial )
      result += "VkVideoEncodeH264CapabilityDeblockingFilterPartial | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityMultipleSlicePerFrame )
      result += "VkVideoEncodeH264CapabilityMultipleSlicePerFrame | ";
    if ( value & VideoEncodeH264CapabilitiesFlagBitsEXT::eVkVideoEncodeH264CapabilityEvenlyDistributedSliceSize )
      result += "VkVideoEncodeH264CapabilityEvenlyDistributedSliceSize | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoEncodeH264InputModeFlagsEXT = Flags<VideoEncodeH264InputModeFlagBitsEXT>;

  template <>
  struct FlagTraits<VideoEncodeH264InputModeFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eFrame ) |
                 VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eSlice ) |
                 VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eNonVcl )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT
                                         operator|( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264InputModeFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT
                                         operator&( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264InputModeFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT
                                         operator^( VideoEncodeH264InputModeFlagBitsEXT bit0, VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264InputModeFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT
                                         operator~( VideoEncodeH264InputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoEncodeH264InputModeFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoEncodeH264InputModeFlagBitsEXT::eFrame )
      result += "Frame | ";
    if ( value & VideoEncodeH264InputModeFlagBitsEXT::eSlice )
      result += "Slice | ";
    if ( value & VideoEncodeH264InputModeFlagBitsEXT::eNonVcl )
      result += "NonVcl | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoEncodeH264OutputModeFlagsEXT = Flags<VideoEncodeH264OutputModeFlagBitsEXT>;

  template <>
  struct FlagTraits<VideoEncodeH264OutputModeFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) |
                 VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) |
                 VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator|(
    VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264OutputModeFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT
                                         operator&( VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264OutputModeFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator^(
    VideoEncodeH264OutputModeFlagBitsEXT bit0, VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264OutputModeFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT
                                         operator~( VideoEncodeH264OutputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoEncodeH264OutputModeFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eFrame )
      result += "Frame | ";
    if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eSlice )
      result += "Slice | ";
    if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl )
      result += "NonVcl | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoEncodeH264CreateFlagsEXT = Flags<VideoEncodeH264CreateFlagBitsEXT>;

  template <>
  struct FlagTraits<VideoEncodeH264CreateFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags =
        VkFlags( VideoEncodeH264CreateFlagBitsEXT::eDefault ) | VkFlags( VideoEncodeH264CreateFlagBitsEXT::eReserved0 )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT
                                         operator|( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264CreateFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT
                                         operator&( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264CreateFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT
                                         operator^( VideoEncodeH264CreateFlagBitsEXT bit0, VideoEncodeH264CreateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeH264CreateFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CreateFlagsEXT
                                         operator~( VideoEncodeH264CreateFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoEncodeH264CreateFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CreateFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoEncodeH264CreateFlagBitsEXT::eReserved0 )
      result += "Reserved0 | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_EXT_video_decode_h264 ===

  using VideoDecodeH264FieldLayoutFlagsEXT = Flags<VideoDecodeH264FieldLayoutFlagBitsEXT>;

  template <>
  struct FlagTraits<VideoDecodeH264FieldLayoutFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eVkVideoDecodeH264ProgressivePicturesOnly ) |
                 VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eLineInterlacedPlane ) |
                 VkFlags( VideoDecodeH264FieldLayoutFlagBitsEXT::eSeparateInterlacedPlane )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT operator|(
    VideoDecodeH264FieldLayoutFlagBitsEXT bit0, VideoDecodeH264FieldLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoDecodeH264FieldLayoutFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT
                                         operator&( VideoDecodeH264FieldLayoutFlagBitsEXT bit0, VideoDecodeH264FieldLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoDecodeH264FieldLayoutFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT operator^(
    VideoDecodeH264FieldLayoutFlagBitsEXT bit0, VideoDecodeH264FieldLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoDecodeH264FieldLayoutFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264FieldLayoutFlagsEXT
                                         operator~( VideoDecodeH264FieldLayoutFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoDecodeH264FieldLayoutFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264FieldLayoutFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eLineInterlacedPlane )
      result += "LineInterlacedPlane | ";
    if ( value & VideoDecodeH264FieldLayoutFlagBitsEXT::eSeparateInterlacedPlane )
      result += "SeparateInterlacedPlane | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  enum class VideoDecodeH264CreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagBitsEXT )
  {
    return "(void)";
  }

  using VideoDecodeH264CreateFlagsEXT = Flags<VideoDecodeH264CreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264CreateFlagsEXT )
  {
    return "{}";
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_USE_PLATFORM_GGP )
  //=== VK_GGP_stream_descriptor_surface ===

  enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP )
  {
    return "(void)";
  }

  using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP>;

  VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_GGP*/

  //=== VK_NV_external_memory_capabilities ===

  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;

  template <>
  struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) |
                 VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV
                                         operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV
                                         operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV
                                         operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV
                                         operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 )
      result += "OpaqueWin32 | ";
    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt )
      result += "OpaqueWin32Kmt | ";
    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image )
      result += "D3D11Image | ";
    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt )
      result += "D3D11ImageKmt | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;

  template <>
  struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) |
                 VkFlags( ExternalMemoryFeatureFlagBitsNV::eExportable ) |
                 VkFlags( ExternalMemoryFeatureFlagBitsNV::eImportable )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV
                                         operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV
                                         operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV
                                         operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( ExternalMemoryFeatureFlagsNV( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly )
      result += "DedicatedOnly | ";
    if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable )
      result += "Exportable | ";
    if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable )
      result += "Importable | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

#if defined( VK_USE_PLATFORM_VI_NN )
  //=== VK_NN_vi_surface ===

  enum class ViSurfaceCreateFlagBitsNN : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )
  {
    return "(void)";
  }

  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;

  VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_VI_NN*/

  //=== VK_EXT_conditional_rendering ===

  using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT>;

  template <>
  struct FlagTraits<ConditionalRenderingFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ConditionalRenderingFlagBitsEXT::eInverted )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT
                                         operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ConditionalRenderingFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT
                                         operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ConditionalRenderingFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT
                                         operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( ConditionalRenderingFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ConditionalRenderingFlagBitsEXT::eInverted )
      result += "Inverted | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  //=== VK_EXT_display_surface_counter ===

  using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT>;

  template <>
  struct FlagTraits<SurfaceCounterFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SurfaceCounterFlagBitsEXT::eVblank )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT
                                         operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SurfaceCounterFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT
                                         operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SurfaceCounterFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT
                                         operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SurfaceCounterFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( SurfaceCounterFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SurfaceCounterFlagBitsEXT::eVblank )
      result += "Vblank | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  //=== VK_NV_viewport_swizzle ===

  enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV )
  {
    return "(void)";
  }

  using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV>;

  VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV )
  {
    return "{}";
  }

  //=== VK_EXT_discard_rectangles ===

  enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT )
  {
    return "(void)";
  }

  using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT )
  {
    return "{}";
  }

  //=== VK_EXT_conservative_rasterization ===

  enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT )
  {
    return "(void)";
  }

  using PipelineRasterizationConservativeStateCreateFlagsEXT =
    Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT )
  {
    return "{}";
  }

  //=== VK_EXT_depth_clip_enable ===

  enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT )
  {
    return "(void)";
  }

  using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT )
  {
    return "{}";
  }

  //=== VK_KHR_performance_query ===

  using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR>;

  template <>
  struct FlagTraits<PerformanceCounterDescriptionFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) |
                 VkFlags( PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|(
    PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&(
    PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^(
    PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR
                                         operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( PerformanceCounterDescriptionFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting )
      result += "PerformanceImpacting | ";
    if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted )
      result += "ConcurrentlyImpacted | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;

  VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR )
  {
    return "{}";
  }

#if defined( VK_USE_PLATFORM_IOS_MVK )
  //=== VK_MVK_ios_surface ===

  enum class IOSSurfaceCreateFlagBitsMVK : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
  {
    return "(void)";
  }

  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK>;

  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_IOS_MVK*/

#if defined( VK_USE_PLATFORM_MACOS_MVK )
  //=== VK_MVK_macos_surface ===

  enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK )
  {
    return "(void)";
  }

  using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK>;

  VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_MACOS_MVK*/

  //=== VK_EXT_debug_utils ===

  using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT>;

  template <>
  struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) |
                 VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) |
                 VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) |
                 VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eError )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|(
    DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT
                                         operator&( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^(
    DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT
                                         operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose )
      result += "Verbose | ";
    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo )
      result += "Info | ";
    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning )
      result += "Warning | ";
    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError )
      result += "Error | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT>;

  template <>
  struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) |
                 VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eValidation ) |
                 VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT
                                         operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT
                                         operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT
                                         operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT
                                         operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral )
      result += "General | ";
    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation )
      result += "Validation | ";
    if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance )
      result += "Performance | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
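
  // Illustrative usage sketch (assuming the DebugUtilsMessengerCreateInfoEXT struct defined later in this
  // header): the severity and type masks above are typically combined when filling its messageSeverity and
  // messageType members, e.g.
  //
  //   VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT createInfo;
  //   createInfo.messageSeverity = VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
  //                                VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT::eError;
  //   createInfo.messageType     = VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
  //                                VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT::eValidation |
  //                                VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT::ePerformance;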

  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
  {
    return "(void)";
  }

  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT )
  {
    return "{}";
  }

  enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
  {
    return "(void)";
  }

  using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT )
  {
    return "{}";
  }

  //=== VK_NV_fragment_coverage_to_color ===

  enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV )
  {
    return "(void)";
  }

  using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV>;

  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV )
  {
    return "{}";
  }

  //=== VK_KHR_acceleration_structure ===

  using GeometryFlagsKHR = Flags<GeometryFlagBitsKHR>;

  template <>
  struct FlagTraits<GeometryFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( GeometryFlagBitsKHR::eOpaque ) | VkFlags( GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0,
                                                                     GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return GeometryFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0,
                                                                     GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return GeometryFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0,
                                                                     GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return GeometryFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( GeometryFlagsKHR( bits ) );
  }

  using GeometryFlagsNV = GeometryFlagsKHR;

  VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & GeometryFlagBitsKHR::eOpaque )
      result += "Opaque | ";
    if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation )
      result += "NoDuplicateAnyHitInvocation | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using GeometryInstanceFlagsKHR = Flags<GeometryInstanceFlagBitsKHR>;

  template <>
  struct FlagTraits<GeometryInstanceFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) |
                 VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) |
                 VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) |
                 VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR
                                         operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return GeometryInstanceFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR
                                         operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return GeometryInstanceFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR
                                         operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return GeometryInstanceFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( GeometryInstanceFlagsKHR( bits ) );
  }

  using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;

  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
      result += "TriangleFacingCullDisable | ";
    if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise )
      result += "TriangleFrontCounterclockwise | ";
    if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque )
      result += "ForceOpaque | ";
    if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque )
      result += "ForceNoOpaque | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using BuildAccelerationStructureFlagsKHR = Flags<BuildAccelerationStructureFlagBitsKHR>;

  template <>
  struct FlagTraits<BuildAccelerationStructureFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) |
                 VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) |
                 VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) |
                 VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) |
                 VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|(
    BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR
                                         operator&( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^(
    BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR
                                         operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( BuildAccelerationStructureFlagsKHR( bits ) );
  }

  using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;

  VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate )
      result += "AllowUpdate | ";
    if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction )
      result += "AllowCompaction | ";
    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace )
      result += "PreferFastTrace | ";
    if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild )
      result += "PreferFastBuild | ";
    if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory )
      result += "LowMemory | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
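
  // Illustrative usage sketch (assuming the AccelerationStructureBuildGeometryInfoKHR struct defined later
  // in this header): build flags are passed through its flags member; ePreferFastTrace and ePreferFastBuild
  // express opposite preferences and should not be combined. A common choice for static geometry:
  //
  //   VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR buildFlags =
  //     VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace |
  //     VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR::eAllowCompaction;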

  using AccelerationStructureCreateFlagsKHR = Flags<AccelerationStructureCreateFlagBitsKHR>;

  template <>
  struct FlagTraits<AccelerationStructureCreateFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator|(
    AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AccelerationStructureCreateFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator&(
    AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AccelerationStructureCreateFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator^(
    AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AccelerationStructureCreateFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR
                                         operator~( AccelerationStructureCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( AccelerationStructureCreateFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay )
      result += "DeviceAddressCaptureReplay | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  //=== VK_NV_framebuffer_mixed_samples ===

  enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV )
  {
    return "(void)";
  }

  using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV>;

  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV )
  {
    return "{}";
  }

  //=== VK_EXT_validation_cache ===

  enum class ValidationCacheCreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT )
  {
    return "(void)";
  }

  using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT )
  {
    return "{}";
  }

  //=== VK_AMD_pipeline_compiler_control ===

  using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD>;

  VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD )
  {
    return "{}";
  }

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_EXT_video_decode_h265 ===

  enum class VideoDecodeH265CreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagBitsEXT )
  {
    return "(void)";
  }

  using VideoDecodeH265CreateFlagsEXT = Flags<VideoDecodeH265CreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( VideoDecodeH265CreateFlagsEXT )
  {
    return "{}";
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_EXT_pipeline_creation_feedback ===

  using PipelineCreationFeedbackFlagsEXT = Flags<PipelineCreationFeedbackFlagBitsEXT>;

  template <>
  struct FlagTraits<PipelineCreationFeedbackFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( PipelineCreationFeedbackFlagBitsEXT::eValid ) |
                 VkFlags( PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) |
                 VkFlags( PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT
                                         operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT
                                         operator&( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineCreationFeedbackFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT
                                         operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineCreationFeedbackFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT
                                         operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( PipelineCreationFeedbackFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid )
      result += "Valid | ";
    if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit )
      result += "ApplicationPipelineCacheHit | ";
    if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration )
      result += "BasePipelineAcceleration | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
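
  // Illustrative usage sketch (assuming the PipelineCreationFeedbackEXT struct defined later in this
  // header): these flags are read back after pipeline creation; testing a bit yields a Flags value that
  // converts explicitly to bool, e.g.
  //
  //   bool cacheHit =
  //     static_cast<bool>( feedback.flags & VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBitsEXT::eValid ) &&
  //     static_cast<bool>( feedback.flags &
  //                        VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit );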

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_imagepipe_surface ===

  enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA )
  {
    return "(void)";
  }

  using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA>;

  VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  //=== VK_EXT_metal_surface ===

  enum class MetalSurfaceCreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT )
  {
    return "(void)";
  }

  using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_METAL_EXT*/

  //=== VK_AMD_shader_core_properties2 ===

  using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD>;

  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD )
  {
    return "{}";
  }

  //=== VK_EXT_tooling_info ===

  using ToolPurposeFlagsEXT = Flags<ToolPurposeFlagBitsEXT>;

  template <>
  struct FlagTraits<ToolPurposeFlagBitsEXT>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( ToolPurposeFlagBitsEXT::eValidation ) | VkFlags( ToolPurposeFlagBitsEXT::eProfiling ) |
                 VkFlags( ToolPurposeFlagBitsEXT::eTracing ) | VkFlags( ToolPurposeFlagBitsEXT::eAdditionalFeatures ) |
                 VkFlags( ToolPurposeFlagBitsEXT::eModifyingFeatures ) |
                 VkFlags( ToolPurposeFlagBitsEXT::eDebugReporting ) | VkFlags( ToolPurposeFlagBitsEXT::eDebugMarkers )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT
                                         operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ToolPurposeFlagsEXT( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator&( ToolPurposeFlagBitsEXT bit0,
                                                                        ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ToolPurposeFlagsEXT( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT
                                         operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return ToolPurposeFlagsEXT( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( ToolPurposeFlagsEXT( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & ToolPurposeFlagBitsEXT::eValidation )
      result += "Validation | ";
    if ( value & ToolPurposeFlagBitsEXT::eProfiling )
      result += "Profiling | ";
    if ( value & ToolPurposeFlagBitsEXT::eTracing )
      result += "Tracing | ";
    if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures )
      result += "AdditionalFeatures | ";
    if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures )
      result += "ModifyingFeatures | ";
    if ( value & ToolPurposeFlagBitsEXT::eDebugReporting )
      result += "DebugReporting | ";
    if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers )
      result += "DebugMarkers | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
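
  // Illustrative usage sketch (assuming the PhysicalDeviceToolPropertiesEXT struct defined later in this
  // header, whose purposes member carries these flags), e.g. to detect an attached validation tool:
  //
  //   bool hasValidation =
  //     static_cast<bool>( toolProperties.purposes & VULKAN_HPP_NAMESPACE::ToolPurposeFlagBitsEXT::eValidation );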

  //=== VK_NV_coverage_reduction_mode ===

  enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV )
  {
    return "(void)";
  }

  using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV>;

  VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV )
  {
    return "{}";
  }

  //=== VK_EXT_headless_surface ===

  enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
  {
    return "(void)";
  }

  using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT )
  {
    return "{}";
  }

  //=== VK_NV_device_generated_commands ===

  using IndirectStateFlagsNV = Flags<IndirectStateFlagBitsNV>;

  template <>
  struct FlagTraits<IndirectStateFlagBitsNV>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( IndirectStateFlagBitsNV::eFlagFrontface )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV
                                         operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return IndirectStateFlagsNV( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&( IndirectStateFlagBitsNV bit0,
                                                                         IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return IndirectStateFlagsNV( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV
                                         operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return IndirectStateFlagsNV( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( IndirectStateFlagsNV( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & IndirectStateFlagBitsNV::eFlagFrontface )
      result += "FlagFrontface | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using IndirectCommandsLayoutUsageFlagsNV = Flags<IndirectCommandsLayoutUsageFlagBitsNV>;

  template <>
  struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNV>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) |
                 VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) |
                 VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|(
    IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV
                                         operator&( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^(
    IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV
                                         operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess )
      result += "ExplicitPreprocess | ";
    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences )
      result += "IndexedSequences | ";
    if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences )
      result += "UnorderedSequences | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  //=== VK_EXT_device_memory_report ===

  enum class DeviceMemoryReportFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT )
  {
    return "(void)";
  }

  using DeviceMemoryReportFlagsEXT = Flags<DeviceMemoryReportFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT )
  {
    return "{}";
  }

  //=== VK_EXT_private_data ===

  using PrivateDataSlotCreateFlagsEXT = Flags<PrivateDataSlotCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagsEXT )
  {
    return "{}";
  }

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_encode_queue ===

  using VideoEncodeFlagsKHR = Flags<VideoEncodeFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoEncodeFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoEncodeFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeFlagBitsKHR::eReserved0 )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR
                                         operator|( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR operator&( VideoEncodeFlagBitsKHR bit0,
                                                                        VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR
                                         operator^( VideoEncodeFlagBitsKHR bit0, VideoEncodeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeFlagsKHR operator~( VideoEncodeFlagBitsKHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoEncodeFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoEncodeFlagBitsKHR::eReserved0 )
      result += "Reserved0 | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoEncodeRateControlFlagsKHR = Flags<VideoEncodeRateControlFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoEncodeRateControlFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags =
        VkFlags( VideoEncodeRateControlFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeRateControlFlagBitsKHR::eReset )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR
                                         operator|( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeRateControlFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR
                                         operator&( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeRateControlFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR
                                         operator^( VideoEncodeRateControlFlagBitsKHR bit0, VideoEncodeRateControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeRateControlFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlFlagsKHR
                                         operator~( VideoEncodeRateControlFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoEncodeRateControlFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoEncodeRateControlFlagBitsKHR::eReset )
      result += "Reset | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  using VideoEncodeRateControlModeFlagsKHR = Flags<VideoEncodeRateControlModeFlagBitsKHR>;

  template <>
  struct FlagTraits<VideoEncodeRateControlModeFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eNone ) |
                 VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eCbr ) |
                 VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eVbr )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator|(
    VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeRateControlModeFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR
                                         operator&( VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeRateControlModeFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator^(
    VideoEncodeRateControlModeFlagBitsKHR bit0, VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return VideoEncodeRateControlModeFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR
                                         operator~( VideoEncodeRateControlModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( VideoEncodeRateControlModeFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & VideoEncodeRateControlModeFlagBitsKHR::eCbr )
      result += "Cbr | ";
    if ( value & VideoEncodeRateControlModeFlagBitsKHR::eVbr )
      result += "Vbr | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_NV_device_diagnostics_config ===

  using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV>;

  template <>
  struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) |
                 VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) |
                 VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV
                                         operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV
                                         operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV
                                         operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV
                                         operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( DeviceDiagnosticsConfigFlagsNV( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo )
      result += "EnableShaderDebugInfo | ";
    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking )
      result += "EnableResourceTracking | ";
    if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints )
      result += "EnableAutomaticCheckpoints | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

  //=== VK_KHR_synchronization2 ===

  using PipelineStageFlags2KHR = Flags<PipelineStageFlagBits2KHR>;

  template <>
  struct FlagTraits<PipelineStageFlagBits2KHR>
  {
    enum : VkFlags64
    {
      allFlags =
        VkFlags64( PipelineStageFlagBits2KHR::eNone ) | VkFlags64( PipelineStageFlagBits2KHR::eTopOfPipe ) |
        VkFlags64( PipelineStageFlagBits2KHR::eDrawIndirect ) | VkFlags64( PipelineStageFlagBits2KHR::eVertexInput ) |
        VkFlags64( PipelineStageFlagBits2KHR::eVertexShader ) |
        VkFlags64( PipelineStageFlagBits2KHR::eTessellationControlShader ) |
        VkFlags64( PipelineStageFlagBits2KHR::eTessellationEvaluationShader ) |
        VkFlags64( PipelineStageFlagBits2KHR::eGeometryShader ) |
        VkFlags64( PipelineStageFlagBits2KHR::eFragmentShader ) |
        VkFlags64( PipelineStageFlagBits2KHR::eEarlyFragmentTests ) |
        VkFlags64( PipelineStageFlagBits2KHR::eLateFragmentTests ) |
        VkFlags64( PipelineStageFlagBits2KHR::eColorAttachmentOutput ) |
        VkFlags64( PipelineStageFlagBits2KHR::eComputeShader ) | VkFlags64( PipelineStageFlagBits2KHR::eAllTransfer ) |
        VkFlags64( PipelineStageFlagBits2KHR::eBottomOfPipe ) | VkFlags64( PipelineStageFlagBits2KHR::eHost ) |
        VkFlags64( PipelineStageFlagBits2KHR::eAllGraphics ) | VkFlags64( PipelineStageFlagBits2KHR::eAllCommands ) |
        VkFlags64( PipelineStageFlagBits2KHR::eCopy ) | VkFlags64( PipelineStageFlagBits2KHR::eResolve ) |
        VkFlags64( PipelineStageFlagBits2KHR::eBlit ) | VkFlags64( PipelineStageFlagBits2KHR::eClear ) |
        VkFlags64( PipelineStageFlagBits2KHR::eIndexInput ) |
        VkFlags64( PipelineStageFlagBits2KHR::eVertexAttributeInput ) |
        VkFlags64( PipelineStageFlagBits2KHR::ePreRasterizationShaders )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
        | VkFlags64( PipelineStageFlagBits2KHR::eVideoDecode ) | VkFlags64( PipelineStageFlagBits2KHR::eVideoEncode )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
        | VkFlags64( PipelineStageFlagBits2KHR::eTransformFeedbackEXT ) |
        VkFlags64( PipelineStageFlagBits2KHR::eConditionalRenderingEXT ) |
        VkFlags64( PipelineStageFlagBits2KHR::eCommandPreprocessNV ) |
        VkFlags64( PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment ) |
        VkFlags64( PipelineStageFlagBits2KHR::eAccelerationStructureBuild ) |
        VkFlags64( PipelineStageFlagBits2KHR::eRayTracingShader ) |
        VkFlags64( PipelineStageFlagBits2KHR::eFragmentDensityProcessEXT ) |
        VkFlags64( PipelineStageFlagBits2KHR::eTaskShaderNV ) | VkFlags64( PipelineStageFlagBits2KHR::eMeshShaderNV )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR
                                         operator|( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineStageFlags2KHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR
                                         operator&( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineStageFlags2KHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR
                                         operator^( PipelineStageFlagBits2KHR bit0, PipelineStageFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return PipelineStageFlags2KHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2KHR operator~( PipelineStageFlagBits2KHR bits )
    VULKAN_HPP_NOEXCEPT
  {
    return ~( PipelineStageFlags2KHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2KHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & PipelineStageFlagBits2KHR::eTopOfPipe )
      result += "TopOfPipe | ";
    if ( value & PipelineStageFlagBits2KHR::eDrawIndirect )
      result += "DrawIndirect | ";
    if ( value & PipelineStageFlagBits2KHR::eVertexInput )
      result += "VertexInput | ";
    if ( value & PipelineStageFlagBits2KHR::eVertexShader )
      result += "VertexShader | ";
    if ( value & PipelineStageFlagBits2KHR::eTessellationControlShader )
      result += "TessellationControlShader | ";
    if ( value & PipelineStageFlagBits2KHR::eTessellationEvaluationShader )
      result += "TessellationEvaluationShader | ";
    if ( value & PipelineStageFlagBits2KHR::eGeometryShader )
      result += "GeometryShader | ";
    if ( value & PipelineStageFlagBits2KHR::eFragmentShader )
      result += "FragmentShader | ";
    if ( value & PipelineStageFlagBits2KHR::eEarlyFragmentTests )
      result += "EarlyFragmentTests | ";
    if ( value & PipelineStageFlagBits2KHR::eLateFragmentTests )
      result += "LateFragmentTests | ";
    if ( value & PipelineStageFlagBits2KHR::eColorAttachmentOutput )
      result += "ColorAttachmentOutput | ";
    if ( value & PipelineStageFlagBits2KHR::eComputeShader )
      result += "ComputeShader | ";
    if ( value & PipelineStageFlagBits2KHR::eAllTransfer )
      result += "AllTransfer | ";
    if ( value & PipelineStageFlagBits2KHR::eBottomOfPipe )
      result += "BottomOfPipe | ";
    if ( value & PipelineStageFlagBits2KHR::eHost )
      result += "Host | ";
    if ( value & PipelineStageFlagBits2KHR::eAllGraphics )
      result += "AllGraphics | ";
    if ( value & PipelineStageFlagBits2KHR::eAllCommands )
      result += "AllCommands | ";
    if ( value & PipelineStageFlagBits2KHR::eCopy )
      result += "Copy | ";
    if ( value & PipelineStageFlagBits2KHR::eResolve )
      result += "Resolve | ";
    if ( value & PipelineStageFlagBits2KHR::eBlit )
      result += "Blit | ";
    if ( value & PipelineStageFlagBits2KHR::eClear )
      result += "Clear | ";
    if ( value & PipelineStageFlagBits2KHR::eIndexInput )
      result += "IndexInput | ";
    if ( value & PipelineStageFlagBits2KHR::eVertexAttributeInput )
      result += "VertexAttributeInput | ";
    if ( value & PipelineStageFlagBits2KHR::ePreRasterizationShaders )
      result += "PreRasterizationShaders | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & PipelineStageFlagBits2KHR::eVideoDecode )
      result += "VideoDecode | ";
    if ( value & PipelineStageFlagBits2KHR::eVideoEncode )
      result += "VideoEncode | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    if ( value & PipelineStageFlagBits2KHR::eTransformFeedbackEXT )
      result += "TransformFeedbackEXT | ";
    if ( value & PipelineStageFlagBits2KHR::eConditionalRenderingEXT )
      result += "ConditionalRenderingEXT | ";
    if ( value & PipelineStageFlagBits2KHR::eCommandPreprocessNV )
      result += "CommandPreprocessNV | ";
    if ( value & PipelineStageFlagBits2KHR::eFragmentShadingRateAttachment )
      result += "FragmentShadingRateAttachment | ";
    if ( value & PipelineStageFlagBits2KHR::eAccelerationStructureBuild )
      result += "AccelerationStructureBuild | ";
    if ( value & PipelineStageFlagBits2KHR::eRayTracingShader )
      result += "RayTracingShader | ";
    if ( value & PipelineStageFlagBits2KHR::eFragmentDensityProcessEXT )
      result += "FragmentDensityProcessEXT | ";
    if ( value & PipelineStageFlagBits2KHR::eTaskShaderNV )
      result += "TaskShaderNV | ";
    if ( value & PipelineStageFlagBits2KHR::eMeshShaderNV )
      result += "MeshShaderNV | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
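
  // Usage sketch (illustration only, not part of the generated header): PipelineStageFlags2KHR behaves like the
  // other Flags<> wrappers, so individual PipelineStageFlagBits2KHR values can be combined with the operators
  // above and printed with to_string. Assumes the default `vk` namespace alias on the user side.
  //
  //   vk::PipelineStageFlags2KHR stages =
  //     vk::PipelineStageFlagBits2KHR::eVertexShader | vk::PipelineStageFlagBits2KHR::eFragmentShader;
  //   if ( stages & vk::PipelineStageFlagBits2KHR::eVertexShader )
  //   {
  //     // vk::to_string( stages ) yields "{ VertexShader | FragmentShader }"
  //   }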

  using AccessFlags2KHR = Flags<AccessFlagBits2KHR>;

  template <>
  struct FlagTraits<AccessFlagBits2KHR>
  {
    enum : VkFlags64
    {
      allFlags =
        VkFlags64( AccessFlagBits2KHR::eNone ) | VkFlags64( AccessFlagBits2KHR::eIndirectCommandRead ) |
        VkFlags64( AccessFlagBits2KHR::eIndexRead ) | VkFlags64( AccessFlagBits2KHR::eVertexAttributeRead ) |
        VkFlags64( AccessFlagBits2KHR::eUniformRead ) | VkFlags64( AccessFlagBits2KHR::eInputAttachmentRead ) |
        VkFlags64( AccessFlagBits2KHR::eShaderRead ) | VkFlags64( AccessFlagBits2KHR::eShaderWrite ) |
        VkFlags64( AccessFlagBits2KHR::eColorAttachmentRead ) | VkFlags64( AccessFlagBits2KHR::eColorAttachmentWrite ) |
        VkFlags64( AccessFlagBits2KHR::eDepthStencilAttachmentRead ) |
        VkFlags64( AccessFlagBits2KHR::eDepthStencilAttachmentWrite ) | VkFlags64( AccessFlagBits2KHR::eTransferRead ) |
        VkFlags64( AccessFlagBits2KHR::eTransferWrite ) | VkFlags64( AccessFlagBits2KHR::eHostRead ) |
        VkFlags64( AccessFlagBits2KHR::eHostWrite ) | VkFlags64( AccessFlagBits2KHR::eMemoryRead ) |
        VkFlags64( AccessFlagBits2KHR::eMemoryWrite ) | VkFlags64( AccessFlagBits2KHR::eShaderSampledRead ) |
        VkFlags64( AccessFlagBits2KHR::eShaderStorageRead ) | VkFlags64( AccessFlagBits2KHR::eShaderStorageWrite )
#if defined( VK_ENABLE_BETA_EXTENSIONS )
        | VkFlags64( AccessFlagBits2KHR::eVideoDecodeRead ) | VkFlags64( AccessFlagBits2KHR::eVideoDecodeWrite ) |
        VkFlags64( AccessFlagBits2KHR::eVideoEncodeRead ) | VkFlags64( AccessFlagBits2KHR::eVideoEncodeWrite )
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
        | VkFlags64( AccessFlagBits2KHR::eTransformFeedbackWriteEXT ) |
        VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterReadEXT ) |
        VkFlags64( AccessFlagBits2KHR::eTransformFeedbackCounterWriteEXT ) |
        VkFlags64( AccessFlagBits2KHR::eConditionalRenderingReadEXT ) |
        VkFlags64( AccessFlagBits2KHR::eCommandPreprocessReadNV ) |
        VkFlags64( AccessFlagBits2KHR::eCommandPreprocessWriteNV ) |
        VkFlags64( AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead ) |
        VkFlags64( AccessFlagBits2KHR::eAccelerationStructureRead ) |
        VkFlags64( AccessFlagBits2KHR::eAccelerationStructureWrite ) |
        VkFlags64( AccessFlagBits2KHR::eFragmentDensityMapReadEXT ) |
        VkFlags64( AccessFlagBits2KHR::eColorAttachmentReadNoncoherentEXT )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator|( AccessFlagBits2KHR bit0,
                                                                    AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AccessFlags2KHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator&( AccessFlagBits2KHR bit0,
                                                                    AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AccessFlags2KHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator^( AccessFlagBits2KHR bit0,
                                                                    AccessFlagBits2KHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return AccessFlags2KHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2KHR operator~( AccessFlagBits2KHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( AccessFlags2KHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( AccessFlags2KHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & AccessFlagBits2KHR::eIndirectCommandRead )
      result += "IndirectCommandRead | ";
    if ( value & AccessFlagBits2KHR::eIndexRead )
      result += "IndexRead | ";
    if ( value & AccessFlagBits2KHR::eVertexAttributeRead )
      result += "VertexAttributeRead | ";
    if ( value & AccessFlagBits2KHR::eUniformRead )
      result += "UniformRead | ";
    if ( value & AccessFlagBits2KHR::eInputAttachmentRead )
      result += "InputAttachmentRead | ";
    if ( value & AccessFlagBits2KHR::eShaderRead )
      result += "ShaderRead | ";
    if ( value & AccessFlagBits2KHR::eShaderWrite )
      result += "ShaderWrite | ";
    if ( value & AccessFlagBits2KHR::eColorAttachmentRead )
      result += "ColorAttachmentRead | ";
    if ( value & AccessFlagBits2KHR::eColorAttachmentWrite )
      result += "ColorAttachmentWrite | ";
    if ( value & AccessFlagBits2KHR::eDepthStencilAttachmentRead )
      result += "DepthStencilAttachmentRead | ";
    if ( value & AccessFlagBits2KHR::eDepthStencilAttachmentWrite )
      result += "DepthStencilAttachmentWrite | ";
    if ( value & AccessFlagBits2KHR::eTransferRead )
      result += "TransferRead | ";
    if ( value & AccessFlagBits2KHR::eTransferWrite )
      result += "TransferWrite | ";
    if ( value & AccessFlagBits2KHR::eHostRead )
      result += "HostRead | ";
    if ( value & AccessFlagBits2KHR::eHostWrite )
      result += "HostWrite | ";
    if ( value & AccessFlagBits2KHR::eMemoryRead )
      result += "MemoryRead | ";
    if ( value & AccessFlagBits2KHR::eMemoryWrite )
      result += "MemoryWrite | ";
    if ( value & AccessFlagBits2KHR::eShaderSampledRead )
      result += "ShaderSampledRead | ";
    if ( value & AccessFlagBits2KHR::eShaderStorageRead )
      result += "ShaderStorageRead | ";
    if ( value & AccessFlagBits2KHR::eShaderStorageWrite )
      result += "ShaderStorageWrite | ";
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    if ( value & AccessFlagBits2KHR::eVideoDecodeRead )
      result += "VideoDecodeRead | ";
    if ( value & AccessFlagBits2KHR::eVideoDecodeWrite )
      result += "VideoDecodeWrite | ";
    if ( value & AccessFlagBits2KHR::eVideoEncodeRead )
      result += "VideoEncodeRead | ";
    if ( value & AccessFlagBits2KHR::eVideoEncodeWrite )
      result += "VideoEncodeWrite | ";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    if ( value & AccessFlagBits2KHR::eTransformFeedbackWriteEXT )
      result += "TransformFeedbackWriteEXT | ";
    if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterReadEXT )
      result += "TransformFeedbackCounterReadEXT | ";
    if ( value & AccessFlagBits2KHR::eTransformFeedbackCounterWriteEXT )
      result += "TransformFeedbackCounterWriteEXT | ";
    if ( value & AccessFlagBits2KHR::eConditionalRenderingReadEXT )
      result += "ConditionalRenderingReadEXT | ";
    if ( value & AccessFlagBits2KHR::eCommandPreprocessReadNV )
      result += "CommandPreprocessReadNV | ";
    if ( value & AccessFlagBits2KHR::eCommandPreprocessWriteNV )
      result += "CommandPreprocessWriteNV | ";
    if ( value & AccessFlagBits2KHR::eFragmentShadingRateAttachmentRead )
      result += "FragmentShadingRateAttachmentRead | ";
    if ( value & AccessFlagBits2KHR::eAccelerationStructureRead )
      result += "AccelerationStructureRead | ";
    if ( value & AccessFlagBits2KHR::eAccelerationStructureWrite )
      result += "AccelerationStructureWrite | ";
    if ( value & AccessFlagBits2KHR::eFragmentDensityMapReadEXT )
      result += "FragmentDensityMapReadEXT | ";
    if ( value & AccessFlagBits2KHR::eColorAttachmentReadNoncoherentEXT )
      result += "ColorAttachmentReadNoncoherentEXT | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }
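
  // Usage sketch (illustration only): AccessFlags2KHR masks are built the same way, for example the access masks
  // of a synchronization2 barrier. Assumes the default `vk` namespace alias.
  //
  //   vk::AccessFlags2KHR colorAccess =
  //     vk::AccessFlagBits2KHR::eColorAttachmentRead | vk::AccessFlagBits2KHR::eColorAttachmentWrite;
  //   // vk::to_string( colorAccess ) yields "{ ColorAttachmentRead | ColorAttachmentWrite }"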

  using SubmitFlagsKHR = Flags<SubmitFlagBitsKHR>;

  template <>
  struct FlagTraits<SubmitFlagBitsKHR>
  {
    enum : VkFlags
    {
      allFlags = VkFlags( SubmitFlagBitsKHR::eProtected )
    };
  };

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator|( SubmitFlagBitsKHR bit0,
                                                                   SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SubmitFlagsKHR( bit0 ) | bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator&( SubmitFlagBitsKHR bit0,
                                                                   SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SubmitFlagsKHR( bit0 ) & bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator^( SubmitFlagBitsKHR bit0,
                                                                   SubmitFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  {
    return SubmitFlagsKHR( bit0 ) ^ bit1;
  }

  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlagsKHR operator~( SubmitFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  {
    return ~( SubmitFlagsKHR( bits ) );
  }

  VULKAN_HPP_INLINE std::string to_string( SubmitFlagsKHR value )
  {
    if ( !value )
      return "{}";

    std::string result;
    if ( value & SubmitFlagBitsKHR::eProtected )
      result += "Protected | ";
    return "{ " + result.substr( 0, result.size() - 3 ) + " }";
  }

#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
  //=== VK_EXT_directfb_surface ===

  enum class DirectFBSurfaceCreateFlagBitsEXT : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT )
  {
    return "(void)";
  }

  using DirectFBSurfaceCreateFlagsEXT = Flags<DirectFBSurfaceCreateFlagBitsEXT>;

  VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  //=== VK_QNX_screen_surface ===

  enum class ScreenSurfaceCreateFlagBitsQNX : VkFlags
  {
  };

  VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagBitsQNX )
  {
    return "(void)";
  }

  using ScreenSurfaceCreateFlagsQNX = Flags<ScreenSurfaceCreateFlagBitsQNX>;

  VULKAN_HPP_INLINE std::string to_string( ScreenSurfaceCreateFlagsQNX )
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
}  // namespace VULKAN_HPP_NAMESPACE

#ifndef VULKAN_HPP_NO_EXCEPTIONS
namespace std
{
  template <>
  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
  {};
}  // namespace std
#endif

namespace VULKAN_HPP_NAMESPACE
{
#ifndef VULKAN_HPP_NO_EXCEPTIONS
  class ErrorCategoryImpl : public std::error_category
  {
  public:
    virtual const char * name() const VULKAN_HPP_NOEXCEPT override
    {
      return VULKAN_HPP_NAMESPACE_STRING "::Result";
    }
    virtual std::string message( int ev ) const override
    {
      return to_string( static_cast<Result>( ev ) );
    }
  };

  class Error
  {
  public:
    Error() VULKAN_HPP_NOEXCEPT                = default;
    Error( const Error & ) VULKAN_HPP_NOEXCEPT = default;
    virtual ~Error() VULKAN_HPP_NOEXCEPT       = default;

    virtual const char * what() const VULKAN_HPP_NOEXCEPT = 0;
  };

  class LogicError
    : public Error
    , public std::logic_error
  {
  public:
    explicit LogicError( const std::string & what ) : Error(), std::logic_error( what ) {}
    explicit LogicError( char const * what ) : Error(), std::logic_error( what ) {}

    virtual const char * what() const VULKAN_HPP_NOEXCEPT
    {
      return std::logic_error::what();
    }
  };

  class SystemError
    : public Error
    , public std::system_error
  {
  public:
    SystemError( std::error_code ec ) : Error(), std::system_error( ec ) {}
    SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {}
    SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {}
    SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {}
    SystemError( int ev, std::error_category const & ecat, std::string const & what )
      : Error(), std::system_error( ev, ecat, what )
    {}
    SystemError( int ev, std::error_category const & ecat, char const * what )
      : Error(), std::system_error( ev, ecat, what )
    {}

    virtual const char * what() const VULKAN_HPP_NOEXCEPT
    {
      return std::system_error::what();
    }
  };

  VULKAN_HPP_INLINE const std::error_category & errorCategory() VULKAN_HPP_NOEXCEPT
  {
    static ErrorCategoryImpl instance;
    return instance;
  }

  VULKAN_HPP_INLINE std::error_code make_error_code( Result e ) VULKAN_HPP_NOEXCEPT
  {
    return std::error_code( static_cast<int>( e ), errorCategory() );
  }

  VULKAN_HPP_INLINE std::error_condition make_error_condition( Result e ) VULKAN_HPP_NOEXCEPT
  {
    return std::error_condition( static_cast<int>( e ), errorCategory() );
  }
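
  // Usage sketch (illustration only): because std::is_error_code_enum is specialized for Result and
  // make_error_code is found via ADL, a Result converts directly to std::error_code, and message() goes through
  // to_string. Only available when exceptions are enabled; assumes the default `vk` namespace alias and an
  // included <iostream> for the logging line.
  //
  //   std::error_code ec = vk::Result::eErrorOutOfDateKHR;
  //   std::cout << ec.category().name() << ": " << ec.message() << std::endl;
  //   // prints "vk::Result: ErrorOutOfDateKHR" with the default namespace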

  class OutOfHostMemoryError : public SystemError
  {
  public:
    OutOfHostMemoryError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message )
    {}
    OutOfHostMemoryError( char const * message )
      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message )
    {}
  };

  class OutOfDeviceMemoryError : public SystemError
  {
  public:
    OutOfDeviceMemoryError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message )
    {}
    OutOfDeviceMemoryError( char const * message )
      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message )
    {}
  };

  class InitializationFailedError : public SystemError
  {
  public:
    InitializationFailedError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message )
    {}
    InitializationFailedError( char const * message )
      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message )
    {}
  };

  class DeviceLostError : public SystemError
  {
  public:
    DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message )
    {}
    DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
  };

  class MemoryMapFailedError : public SystemError
  {
  public:
    MemoryMapFailedError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message )
    {}
    MemoryMapFailedError( char const * message )
      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message )
    {}
  };

  class LayerNotPresentError : public SystemError
  {
  public:
    LayerNotPresentError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message )
    {}
    LayerNotPresentError( char const * message )
      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message )
    {}
  };

  class ExtensionNotPresentError : public SystemError
  {
  public:
    ExtensionNotPresentError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message )
    {}
    ExtensionNotPresentError( char const * message )
      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message )
    {}
  };

  class FeatureNotPresentError : public SystemError
  {
  public:
    FeatureNotPresentError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message )
    {}
    FeatureNotPresentError( char const * message )
      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message )
    {}
  };

  class IncompatibleDriverError : public SystemError
  {
  public:
    IncompatibleDriverError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message )
    {}
    IncompatibleDriverError( char const * message )
      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message )
    {}
  };

  class TooManyObjectsError : public SystemError
  {
  public:
    TooManyObjectsError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message )
    {}
    TooManyObjectsError( char const * message )
      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message )
    {}
  };

  class FormatNotSupportedError : public SystemError
  {
  public:
    FormatNotSupportedError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message )
    {}
    FormatNotSupportedError( char const * message )
      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message )
    {}
  };

  class FragmentedPoolError : public SystemError
  {
  public:
    FragmentedPoolError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message )
    {}
    FragmentedPoolError( char const * message )
      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message )
    {}
  };

  class UnknownError : public SystemError
  {
  public:
    UnknownError( std::string const & message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
    UnknownError( char const * message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
  };

  class OutOfPoolMemoryError : public SystemError
  {
  public:
    OutOfPoolMemoryError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message )
    {}
    OutOfPoolMemoryError( char const * message )
      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message )
    {}
  };

  class InvalidExternalHandleError : public SystemError
  {
  public:
    InvalidExternalHandleError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message )
    {}
    InvalidExternalHandleError( char const * message )
      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message )
    {}
  };

  class FragmentationError : public SystemError
  {
  public:
    FragmentationError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorFragmentation ), message )
    {}
    FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message )
    {}
  };

  class InvalidOpaqueCaptureAddressError : public SystemError
  {
  public:
    InvalidOpaqueCaptureAddressError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message )
    {}
    InvalidOpaqueCaptureAddressError( char const * message )
      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message )
    {}
  };

  class SurfaceLostKHRError : public SystemError
  {
  public:
    SurfaceLostKHRError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message )
    {}
    SurfaceLostKHRError( char const * message )
      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message )
    {}
  };

  class NativeWindowInUseKHRError : public SystemError
  {
  public:
    NativeWindowInUseKHRError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message )
    {}
    NativeWindowInUseKHRError( char const * message )
      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message )
    {}
  };

  class OutOfDateKHRError : public SystemError
  {
  public:
    OutOfDateKHRError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message )
    {}
    OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
  };

  class IncompatibleDisplayKHRError : public SystemError
  {
  public:
    IncompatibleDisplayKHRError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message )
    {}
    IncompatibleDisplayKHRError( char const * message )
      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message )
    {}
  };

  class ValidationFailedEXTError : public SystemError
  {
  public:
    ValidationFailedEXTError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message )
    {}
    ValidationFailedEXTError( char const * message )
      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message )
    {}
  };

  class InvalidShaderNVError : public SystemError
  {
  public:
    InvalidShaderNVError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message )
    {}
    InvalidShaderNVError( char const * message )
      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message )
    {}
  };

  class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError
  {
  public:
    InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message )
    {}
    InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message )
      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message )
    {}
  };

  class NotPermittedEXTError : public SystemError
  {
  public:
    NotPermittedEXTError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message )
    {}
    NotPermittedEXTError( char const * message )
      : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message )
    {}
  };

#  if defined( VK_USE_PLATFORM_WIN32_KHR )
  class FullScreenExclusiveModeLostEXTError : public SystemError
  {
  public:
    FullScreenExclusiveModeLostEXTError( std::string const & message )
      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message )
    {}
    FullScreenExclusiveModeLostEXTError( char const * message )
      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message )
    {}
  };
#  endif /*VK_USE_PLATFORM_WIN32_KHR*/

  [[noreturn]] static void throwResultException( Result result, char const * message )
  {
    switch ( result )
    {
      case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message );
      case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message );
      case Result::eErrorInitializationFailed: throw InitializationFailedError( message );
      case Result::eErrorDeviceLost: throw DeviceLostError( message );
      case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message );
      case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message );
      case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message );
      case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message );
      case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message );
      case Result::eErrorTooManyObjects: throw TooManyObjectsError( message );
      case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message );
      case Result::eErrorFragmentedPool: throw FragmentedPoolError( message );
      case Result::eErrorUnknown: throw UnknownError( message );
      case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message );
      case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message );
      case Result::eErrorFragmentation: throw FragmentationError( message );
      case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message );
      case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message );
      case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message );
      case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message );
      case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message );
      case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
      case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT:
        throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
      case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message );
#  if defined( VK_USE_PLATFORM_WIN32_KHR )
      case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
#  endif /*VK_USE_PLATFORM_WIN32_KHR*/
      default: throw SystemError( make_error_code( result ) );
    }
  }
#endif
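
  // Usage sketch (illustration only): with exceptions enabled, non-success results from the enhanced-mode
  // wrappers are thrown as the specific error classes above, all of which derive from vk::SystemError.
  // The handles below (device, swapchain, imageAvailable) are hypothetical, previously created objects.
  //
  //   try
  //   {
  //     uint32_t imageIndex = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, nullptr ).value;
  //   }
  //   catch ( vk::OutOfDateKHRError const & )
  //   {
  //     // recreate the swapchain and retry
  //   }
  //   catch ( vk::SystemError const & err )
  //   {
  //     // err.code() is the std::error_code made from the vk::Result, err.what() its message
  //   }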

  template <typename T>
  void ignore( T const & ) VULKAN_HPP_NOEXCEPT
  {}

  template <typename T>
  struct ResultValue
  {
#ifdef VULKAN_HPP_HAS_NOEXCEPT
    ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( v ) ) )
#else
    ResultValue( Result r, T & v )
#endif
      : result( r ), value( v )
    {}

#ifdef VULKAN_HPP_HAS_NOEXCEPT
    ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) )
#else
    ResultValue( Result r, T && v )
#endif
      : result( r ), value( std::move( v ) )
    {}

    Result result;
    T      value;

    operator std::tuple<Result &, T &>() VULKAN_HPP_NOEXCEPT
    {
      return std::tuple<Result &, T &>( result, value );
    }

#if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST )
    VULKAN_HPP_DEPRECATED(
      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
    operator T const &() const & VULKAN_HPP_NOEXCEPT
    {
      return value;
    }

    VULKAN_HPP_DEPRECATED(
      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
    operator T &() & VULKAN_HPP_NOEXCEPT
    {
      return value;
    }

    VULKAN_HPP_DEPRECATED(
      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
    operator T const &&() const && VULKAN_HPP_NOEXCEPT
    {
      return std::move( value );
    }

    VULKAN_HPP_DEPRECATED(
      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
    operator T &&() && VULKAN_HPP_NOEXCEPT
    {
      return std::move( value );
    }
#endif
  };
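
  // Usage sketch (illustration only): wrappers with several success codes return a ResultValue<T>; since the
  // implicit-cast operators are deprecated, access the members explicitly (C++17 structured bindings also work,
  // because result and value are the only public data members). The handles used here are hypothetical,
  // previously created objects.
  //
  //   vk::ResultValue<uint32_t> rv = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, nullptr );
  //   if ( ( rv.result == vk::Result::eSuccess ) || ( rv.result == vk::Result::eSuboptimalKHR ) )
  //   {
  //     uint32_t imageIndex = rv.value;
  //   }
  //   // or, with C++17:  auto [result, imageIndex] = device.acquireNextImageKHR( ... );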

#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
  template <typename Type, typename Dispatch>
  struct ResultValue<UniqueHandle<Type, Dispatch>>
  {
#  ifdef VULKAN_HPP_HAS_NOEXCEPT
    ResultValue( Result r, UniqueHandle<Type, Dispatch> && v ) VULKAN_HPP_NOEXCEPT
#  else
    ResultValue( Result r, UniqueHandle<Type, Dispatch> && v )
#  endif
      : result( r )
      , value( std::move( v ) )
    {}

    std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple()
    {
      return std::make_tuple( result, std::move( value ) );
    }

#  if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST )
    VULKAN_HPP_DEPRECATED(
      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
    operator UniqueHandle<Type, Dispatch> &() & VULKAN_HPP_NOEXCEPT
    {
      return value;
    }

    VULKAN_HPP_DEPRECATED(
      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
    operator UniqueHandle<Type, Dispatch>() VULKAN_HPP_NOEXCEPT
    {
      return std::move( value );
    }
#  endif

    Result                       result;
    UniqueHandle<Type, Dispatch> value;
  };

  template <typename Type, typename Dispatch>
  struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>>
  {
#  ifdef VULKAN_HPP_HAS_NOEXCEPT
    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT
#  else
    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v )
#  endif
      : result( r )
      , value( std::move( v ) )
    {}

    std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple()
    {
      return std::make_tuple( result, std::move( value ) );
    }

    Result                                    result;
    std::vector<UniqueHandle<Type, Dispatch>> value;

#  if !defined( VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST )
    VULKAN_HPP_DEPRECATED(
      "Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue." )
    operator std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>() VULKAN_HPP_NOEXCEPT
    {
      return std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>( result, value );
    }
#  endif
  };
#endif

  template <typename T>
  struct ResultValueType
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    typedef ResultValue<T> type;
#else
    typedef T    type;
#endif
  };

  template <>
  struct ResultValueType<void>
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    typedef Result type;
#else
    typedef void type;
#endif
  };

  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    ignore( message );
    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
    return result;
#else
    if ( result != Result::eSuccess )
    {
      throwResultException( result, message );
    }
#endif
  }

  template <typename T>
  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    ignore( message );
    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
    return ResultValue<T>( result, std::move( data ) );
#else
    if ( result != Result::eSuccess )
    {
      throwResultException( result, message );
    }
    return std::move( data );
#endif
  }

  VULKAN_HPP_INLINE Result createResultValue( Result                        result,
                                              char const *                  message,
                                              std::initializer_list<Result> successCodes )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    ignore( message );
    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#else
    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
    {
      throwResultException( result, message );
    }
#endif
    return result;
  }
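
  // Usage sketch (illustration only): this overload is what the generated wrappers use for commands with several
  // success codes; any result outside successCodes throws (or asserts when exceptions are disabled). Roughly, a
  // wrapper such as Device::getEventStatus is expected to call it like this:
  //
  //   Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
  //   return createResultValue( result,
  //                             VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
  //                             { Result::eEventSet, Result::eEventReset } );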

  template <typename T>
  VULKAN_HPP_INLINE ResultValue<T>
                    createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    ignore( message );
    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#else
    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
    {
      throwResultException( result, message );
    }
#endif
    return ResultValue<T>( result, std::move( data ) );
  }

#ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename T, typename D>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T, D>>::type createResultValue(
    Result result, T & data, char const * message, typename UniqueHandleTraits<T, D>::deleter const & deleter )
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    ignore( message );
    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
    return ResultValue<UniqueHandle<T, D>>( result, UniqueHandle<T, D>( data, deleter ) );
#  else
    if ( result != Result::eSuccess )
    {
      throwResultException( result, message );
    }
    return UniqueHandle<T, D>( data, deleter );
#  endif
  }

  template <typename T, typename D>
  VULKAN_HPP_INLINE ResultValue<UniqueHandle<T, D>>
                    createResultValue( Result                                             result,
                                       T &                                                data,
                                       char const *                                       message,
                                       std::initializer_list<Result>                      successCodes,
                                       typename UniqueHandleTraits<T, D>::deleter const & deleter )
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    ignore( message );
    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#  else
    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
    {
      throwResultException( result, message );
    }
#  endif
    return ResultValue<UniqueHandle<T, D>>( result, UniqueHandle<T, D>( data, deleter ) );
  }

  template <typename T, typename D>
  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<T, D>>>::type
    createResultValue( Result result, std::vector<UniqueHandle<T, D>> && data, char const * message )
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    ignore( message );
    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
    return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
#  else
    if ( result != Result::eSuccess )
    {
      throwResultException( result, message );
    }
    return std::move( data );
#  endif
  }

  template <typename T, typename D>
  VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<T, D>>>
                    createResultValue( Result                             result,
                                       std::vector<UniqueHandle<T, D>> && data,
                                       char const *                       message,
                                       std::initializer_list<Result>      successCodes )
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    ignore( message );
    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#  else
    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
    {
      throwResultException( result, message );
    }
#  endif
    return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
  }
#endif

  struct AabbPositionsKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( float minX_ = {},
                                           float minY_ = {},
                                           float minZ_ = {},
                                           float maxX_ = {},
                                           float maxY_ = {},
                                           float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT
      : minX( minX_ )
      , minY( minY_ )
      , minZ( minZ_ )
      , maxX( maxX_ )
      , maxY( maxY_ )
      , maxZ( maxZ_ )
    {}

    VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
      return *this;
    }

    AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
    {
      minX = minX_;
      return *this;
    }

    AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
    {
      minY = minY_;
      return *this;
    }

    AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
    {
      minZ = minZ_;
      return *this;
    }

    AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
    {
      maxX = maxX_;
      return *this;
    }

    AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
    {
      maxY = maxY_;
      return *this;
    }

    AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
    {
      maxZ = maxZ_;
      return *this;
    }

    operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAabbPositionsKHR *>( this );
    }

    operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAabbPositionsKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AabbPositionsKHR const & ) const = default;
#else
    bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) &&
             ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ );
    }

    bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float minX = {};
    float minY = {};
    float minZ = {};
    float maxX = {};
    float maxY = {};
    float maxZ = {};
  };
  static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
  using AabbPositionsNV = AabbPositionsKHR;
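
  // Usage sketch (illustration only): AabbPositionsKHR can be filled through the constructor or the chainable
  // setters; both spellings below describe the same unit box. Assumes the default `vk` namespace alias.
  //
  //   vk::AabbPositionsKHR aabb( -1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f );
  //
  //   vk::AabbPositionsKHR aabb2;
  //   aabb2.setMinX( -1.0f ).setMinY( -1.0f ).setMinZ( -1.0f ).setMaxX( 1.0f ).setMaxY( 1.0f ).setMaxZ( 1.0f );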

  class AccelerationStructureKHR
  {
  public:
    using CType = VkAccelerationStructureKHR;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;

  public:
    VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default;
    VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT
      AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
      : m_accelerationStructureKHR( accelerationStructureKHR )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
    {
      m_accelerationStructureKHR = accelerationStructureKHR;
      return *this;
    }
#endif

    AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_accelerationStructureKHR = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR;
    }

    bool operator!=( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR;
    }

    bool operator<( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureKHR;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureKHR != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureKHR == VK_NULL_HANDLE;
    }

  private:
    VkAccelerationStructureKHR m_accelerationStructureKHR = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eAccelerationStructureKHR>
  {
    using type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
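
  // Usage sketch (illustration only): like every handle wrapper, AccelerationStructureKHR is a thin value type
  // that can be tested against the null handle and converted back to the C handle when needed. The conversion is
  // implicit when VULKAN_HPP_TYPESAFE_CONVERSION is defined (the 64-bit default) and explicit otherwise;
  // static_cast works in either case.
  //
  //   vk::AccelerationStructureKHR accel;  // default-constructed: holds VK_NULL_HANDLE
  //   if ( !accel )
  //   {
  //     // not created yet
  //   }
  //   VkAccelerationStructureKHR raw = static_cast<VkAccelerationStructureKHR>( accel );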

  union DeviceOrHostAddressConstKHR
  {
    DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
    }

    DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
      : deviceAddress( deviceAddress_ )
    {}

    DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {}

    DeviceOrHostAddressConstKHR &
      setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      hostAddress = hostAddress_;
      return *this;
    }

    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR &
      operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
      return *this;
    }

    operator VkDeviceOrHostAddressConstKHR const &() const
    {
      return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR *>( this );
    }

    operator VkDeviceOrHostAddressConstKHR &()
    {
      return *reinterpret_cast<VkDeviceOrHostAddressConstKHR *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
    const void *                        hostAddress;
#else
    VkDeviceAddress                                 deviceAddress;
    const void *                                    hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };
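
  // Usage sketch (illustration only): the union carries either a GPU address or a host pointer; set only the
  // member matching the build mode of the acceleration structure. vertexBufferAddress and vertices are
  // hypothetical names from the calling code.
  //
  //   vk::DeviceOrHostAddressConstKHR vertexData;
  //   vertexData.setDeviceAddress( vertexBufferAddress );   // device builds: a VkDeviceAddress
  //   // vertexData.setHostAddress( vertices.data() );      // host builds: a CPU pointer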

  struct AccelerationStructureGeometryTrianglesDataKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureGeometryTrianglesDataKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    AccelerationStructureGeometryTrianglesDataKHR(
      VULKAN_HPP_NAMESPACE::Format                      vertexFormat_  = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_    = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                  vertexStride_  = {},
      uint32_t                                          maxVertex_     = {},
      VULKAN_HPP_NAMESPACE::IndexType                   indexType_     = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_     = {},
      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {} ) VULKAN_HPP_NOEXCEPT
      : vertexFormat( vertexFormat_ )
      , vertexData( vertexData_ )
      , vertexStride( vertexStride_ )
      , maxVertex( maxVertex_ )
      , indexType( indexType_ )
      , indexData( indexData_ )
      , transformData( transformData_ )
    {}

    AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryTrianglesDataKHR(
          *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureGeometryTrianglesDataKHR &
      operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryTrianglesDataKHR &
      operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureGeometryTrianglesDataKHR &
      setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexFormat = vertexFormat_;
      return *this;
    }

    AccelerationStructureGeometryTrianglesDataKHR &
      setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexData = vertexData_;
      return *this;
    }

    AccelerationStructureGeometryTrianglesDataKHR &
      setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexStride = vertexStride_;
      return *this;
    }

    AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT
    {
      maxVertex = maxVertex_;
      return *this;
    }

    AccelerationStructureGeometryTrianglesDataKHR &
      setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    AccelerationStructureGeometryTrianglesDataKHR &
      setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
    {
      indexData = indexData_;
      return *this;
    }

    AccelerationStructureGeometryTrianglesDataKHR &
      setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
    {
      transformData = transformData_;
      return *this;
    }

    operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
    const void *                        pNext        = {};
    VULKAN_HPP_NAMESPACE::Format        vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData    = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  vertexStride  = {};
    uint32_t                                          maxVertex     = {};
    VULKAN_HPP_NAMESPACE::IndexType                   indexType     = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData     = {};
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {};
  };
  static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) ==
                   sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureGeometryTrianglesDataKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryTrianglesDataKHR>
  {
    using Type = AccelerationStructureGeometryTrianglesDataKHR;
  };
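
  // Usage sketch (illustration only): the chainable setters make it convenient to describe a triangle geometry in
  // place. vertexAddress, indexAddress, Vertex and vertexCount are hypothetical names from the calling code.
  //
  //   auto triangles = vk::AccelerationStructureGeometryTrianglesDataKHR()
  //                      .setVertexFormat( vk::Format::eR32G32B32Sfloat )
  //                      .setVertexData( vertexAddress )
  //                      .setVertexStride( sizeof( Vertex ) )
  //                      .setMaxVertex( vertexCount - 1 )
  //                      .setIndexType( vk::IndexType::eUint32 )
  //                      .setIndexData( indexAddress );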

  struct AccelerationStructureGeometryAabbsDataKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureGeometryAabbsDataKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {} ) VULKAN_HPP_NOEXCEPT
      : data( data_ )
      , stride( stride_ )
    {}

    AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryAabbsDataKHR(
          *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureGeometryAabbsDataKHR &
      operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryAabbsDataKHR &
      operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureGeometryAabbsDataKHR &
      setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }

    AccelerationStructureGeometryAabbsDataKHR &
      setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
    const void *                                      pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data  = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  stride = {};
  };
  static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) ==
                   sizeof( VkAccelerationStructureGeometryAabbsDataKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureGeometryAabbsDataKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryAabbsDataKHR>
  {
    using Type = AccelerationStructureGeometryAabbsDataKHR;
  };

  struct AccelerationStructureGeometryInstancesDataKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureGeometryInstancesDataKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_           = {},
                                                   VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {} )
      VULKAN_HPP_NOEXCEPT
      : arrayOfPointers( arrayOfPointers_ )
      , data( data_ )
    {}

    AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryInstancesDataKHR(
          *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureGeometryInstancesDataKHR &
      operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryInstancesDataKHR &
      operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureGeometryInstancesDataKHR &
      setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayOfPointers = arrayOfPointers_;
      return *this;
    }

    AccelerationStructureGeometryInstancesDataKHR &
      setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }

    operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
    const void *                        pNext           = {};
    VULKAN_HPP_NAMESPACE::Bool32        arrayOfPointers = {};
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
  };
  static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) ==
                   sizeof( VkAccelerationStructureGeometryInstancesDataKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureGeometryInstancesDataKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryInstancesDataKHR>
  {
    using Type = AccelerationStructureGeometryInstancesDataKHR;
  };
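
  // Illustrative usage of AccelerationStructureGeometryInstancesDataKHR above (not part of the
  // generated API; variable names are hypothetical and the default "vk" namespace is assumed):
  //
  //   vk::AccelerationStructureGeometryInstancesDataKHR instancesData;
  //   instancesData.setArrayOfPointers( VK_FALSE )            // data addresses the instances themselves,
  //                .setData( instanceBufferDeviceAddress );   // not an array of pointers to them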

  union AccelerationStructureGeometryDataKHR
  {
    AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) );
    }

    AccelerationStructureGeometryDataKHR(
      VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} )
      : triangles( triangles_ )
    {}

    AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ )
      : aabbs( aabbs_ )
    {}

    AccelerationStructureGeometryDataKHR(
      VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ )
      : instances( instances_ )
    {}

    AccelerationStructureGeometryDataKHR & setTriangles(
      VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
    {
      triangles = triangles_;
      return *this;
    }

    AccelerationStructureGeometryDataKHR &
      setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
    {
      aabbs = aabbs_;
      return *this;
    }

    AccelerationStructureGeometryDataKHR & setInstances(
      VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
    {
      instances = instances_;
      return *this;
    }

    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR &
      operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) );
      return *this;
    }

    operator VkAccelerationStructureGeometryDataKHR const &() const
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryDataKHR *>( this );
    }

    operator VkAccelerationStructureGeometryDataKHR &()
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryDataKHR *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles;
    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR     aabbs;
    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances;
#else
    VkAccelerationStructureGeometryTrianglesDataKHR triangles;
    VkAccelerationStructureGeometryAabbsDataKHR     aabbs;
    VkAccelerationStructureGeometryInstancesDataKHR instances;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };
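
  // Note: as with the other unions in this header, AccelerationStructureGeometryDataKHR does not
  // track its active member; only the member matching the geometry type actually in use may be
  // read. A minimal sketch with a hypothetical aabbsData value:
  //
  //   vk::AccelerationStructureGeometryDataKHR geometryData;
  //   geometryData.setAabbs( aabbsData );   // 'aabbs' is now the member that may be accessed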

  struct AccelerationStructureGeometryKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    AccelerationStructureGeometryKHR(
      VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
      VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {},
      VULKAN_HPP_NAMESPACE::GeometryFlagsKHR                     flags_    = {} ) VULKAN_HPP_NOEXCEPT
      : geometryType( geometryType_ )
      , geometry( geometry_ )
      , flags( flags_ )
    {}

    AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureGeometryKHR( *reinterpret_cast<AccelerationStructureGeometryKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureGeometryKHR &
      operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureGeometryKHR &
      setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryType = geometryType_;
      return *this;
    }

    AccelerationStructureGeometryKHR &
      setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
    {
      geometry = geometry_;
      return *this;
    }

    AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureGeometryKHR *>( this );
    }

    operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureGeometryKHR *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType        = StructureType::eAccelerationStructureGeometryKHR;
    const void *                          pNext        = {};
    VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
    VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {};
    VULKAN_HPP_NAMESPACE::GeometryFlagsKHR                     flags    = {};
  };
  static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureGeometryKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureGeometryKHR>
  {
    using Type = AccelerationStructureGeometryKHR;
  };
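
  // Illustrative usage of AccelerationStructureGeometryKHR above (not part of the generated API):
  // geometryType selects which member of the AccelerationStructureGeometryDataKHR union is read,
  // so the two must be set consistently. Variable names are hypothetical.
  //
  //   vk::AccelerationStructureGeometryKHR geometry;
  //   geometry.setGeometryType( vk::GeometryTypeKHR::eAabbs )
  //           .setGeometry( geometryData )                     // its 'aabbs' member must be the active one
  //           .setFlags( vk::GeometryFlagBitsKHR::eOpaque );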

  union DeviceOrHostAddressKHR
  {
    DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) );
    }

    DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ )
    {}

    DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {}

    DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      hostAddress = hostAddress_;
      return *this;
    }

    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR &
      operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) );
      return *this;
    }

    operator VkDeviceOrHostAddressKHR const &() const
    {
      return *reinterpret_cast<const VkDeviceOrHostAddressKHR *>( this );
    }

    operator VkDeviceOrHostAddressKHR &()
    {
      return *reinterpret_cast<VkDeviceOrHostAddressKHR *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
    void *                              hostAddress;
#else
    VkDeviceAddress                                 deviceAddress;
    void *                                          hostAddress;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };
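
  // Note: which member of DeviceOrHostAddressKHR is meaningful depends on whether the
  // acceleration structure is built on the device or on the host; the wrapper does not track the
  // active member. A minimal sketch with a hypothetical scratch buffer address:
  //
  //   vk::DeviceOrHostAddressKHR scratch;
  //   scratch.setDeviceAddress( scratchBufferDeviceAddress );   // device build: supply a device address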

  struct AccelerationStructureBuildGeometryInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureBuildGeometryInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    AccelerationStructureBuildGeometryInfoKHR(
      VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ =
        VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {},
      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR  mode_ =
        VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild,
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR                         srcAccelerationStructure_ = {},
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR                         dstAccelerationStructure_ = {},
      uint32_t                                                               geometryCount_            = {},
      const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR *         pGeometries_              = {},
      const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_             = {},
      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR                           scratchData_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , flags( flags_ )
      , mode( mode_ )
      , srcAccelerationStructure( srcAccelerationStructure_ )
      , dstAccelerationStructure( dstAccelerationStructure_ )
      , geometryCount( geometryCount_ )
      , pGeometries( pGeometries_ )
      , ppGeometries( ppGeometries_ )
      , scratchData( scratchData_ )
    {}

    AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : AccelerationStructureBuildGeometryInfoKHR(
          *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    AccelerationStructureBuildGeometryInfoKHR(
      VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR       type_,
      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_,
      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR  mode_,
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR           srcAccelerationStructure_,
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR           dstAccelerationStructure_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ = {},
      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR                                    scratchData_ = {} )
      : type( type_ )
      , flags( flags_ )
      , mode( mode_ )
      , srcAccelerationStructure( srcAccelerationStructure_ )
      , dstAccelerationStructure( dstAccelerationStructure_ )
      , geometryCount( static_cast<uint32_t>( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) )
      , pGeometries( geometries_.data() )
      , ppGeometries( pGeometries_.data() )
      , scratchData( scratchData_ )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 );
#    else
      if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    AccelerationStructureBuildGeometryInfoKHR &
      operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureBuildGeometryInfoKHR &
      operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureBuildGeometryInfoKHR &
      setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    AccelerationStructureBuildGeometryInfoKHR &
      setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    AccelerationStructureBuildGeometryInfoKHR &
      setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure(
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccelerationStructure = srcAccelerationStructure_;
      return *this;
    }

    AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure(
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccelerationStructure = dstAccelerationStructure_;
      return *this;
    }

    AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = geometryCount_;
      return *this;
    }

    AccelerationStructureBuildGeometryInfoKHR &
      setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT
    {
      pGeometries = pGeometries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    AccelerationStructureBuildGeometryInfoKHR & setGeometries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = static_cast<uint32_t>( geometries_.size() );
      pGeometries   = geometries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    AccelerationStructureBuildGeometryInfoKHR & setPpGeometries(
      const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
    {
      ppGeometries = ppGeometries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    AccelerationStructureBuildGeometryInfoKHR & setPGeometries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const> const & pGeometries_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = static_cast<uint32_t>( pGeometries_.size() );
      ppGeometries  = pGeometries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    AccelerationStructureBuildGeometryInfoKHR &
      setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
    {
      scratchData = scratchData_;
      return *this;
    }

    operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type =
      VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR  mode =
      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild;
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR                         srcAccelerationStructure = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR                         dstAccelerationStructure = {};
    uint32_t                                                               geometryCount            = {};
    const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR *         pGeometries              = {};
    const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries             = {};
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR                           scratchData              = {};
  };
  static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) ==
                   sizeof( VkAccelerationStructureBuildGeometryInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureBuildGeometryInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR>
  {
    using Type = AccelerationStructureBuildGeometryInfoKHR;
  };
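
  // Illustrative usage of AccelerationStructureBuildGeometryInfoKHR above (not part of the
  // generated API): a bottom-level build on the device. Only one of pGeometries / ppGeometries is
  // consumed by the API, which is what the constructor check above enforces. The variables
  // (blas, geometry, scratchBufferDeviceAddress) are hypothetical; geometry is a single
  // vk::AccelerationStructureGeometryKHR lvalue.
  //
  //   vk::AccelerationStructureBuildGeometryInfoKHR buildInfo;
  //   buildInfo.setType( vk::AccelerationStructureTypeKHR::eBottomLevel )
  //            .setMode( vk::BuildAccelerationStructureModeKHR::eBuild )
  //            .setDstAccelerationStructure( blas )
  //            .setGeometries( geometry )                      // ArrayProxy overload: also sets geometryCount
  //            .setScratchData( scratchBufferDeviceAddress );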

  struct AccelerationStructureBuildRangeInfoKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( uint32_t primitiveCount_  = {},
                                                                 uint32_t primitiveOffset_ = {},
                                                                 uint32_t firstVertex_     = {},
                                                                 uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : primitiveCount( primitiveCount_ )
      , primitiveOffset( primitiveOffset_ )
      , firstVertex( firstVertex_ )
      , transformOffset( transformOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureBuildRangeInfoKHR(
          *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR &
                            operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureBuildRangeInfoKHR &
      operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveCount = primitiveCount_;
      return *this;
    }

    AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveOffset = primitiveOffset_;
      return *this;
    }

    AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
    {
      firstVertex = firstVertex_;
      return *this;
    }

    AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      transformOffset = transformOffset_;
      return *this;
    }

    operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) &&
             ( firstVertex == rhs.firstVertex ) && ( transformOffset == rhs.transformOffset );
    }

    bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t primitiveCount  = {};
    uint32_t primitiveOffset = {};
    uint32_t firstVertex     = {};
    uint32_t transformOffset = {};
  };
  static_assert( sizeof( AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureBuildRangeInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  struct AccelerationStructureBuildSizesInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureBuildSizesInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR(
      VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {},
      VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_         = {},
      VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_          = {} ) VULKAN_HPP_NOEXCEPT
      : accelerationStructureSize( accelerationStructureSize_ )
      , updateScratchSize( updateScratchSize_ )
      , buildScratchSize( buildScratchSize_ )
    {}

    VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureBuildSizesInfoKHR(
          *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR &
                            operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureBuildSizesInfoKHR &
      operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureBuildSizesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureBuildSizesInfoKHR &
      setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureSize = accelerationStructureSize_;
      return *this;
    }

    AccelerationStructureBuildSizesInfoKHR &
      setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT
    {
      updateScratchSize = updateScratchSize_;
      return *this;
    }

    AccelerationStructureBuildSizesInfoKHR &
      setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
    {
      buildScratchSize = buildScratchSize_;
      return *this;
    }

    operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR *>( this );
    }

    operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( accelerationStructureSize == rhs.accelerationStructureSize ) &&
             ( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize );
    }

    bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    accelerationStructureSize = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    updateScratchSize         = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    buildScratchSize          = {};
  };
  static_assert( sizeof( AccelerationStructureBuildSizesInfoKHR ) == sizeof( VkAccelerationStructureBuildSizesInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureBuildSizesInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureBuildSizesInfoKHR>
  {
    using Type = AccelerationStructureBuildSizesInfoKHR;
  };
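
  // Note: AccelerationStructureBuildSizesInfoKHR is normally an output, filled by the
  // getAccelerationStructureBuildSizesKHR query for a given build-geometry description and
  // per-geometry primitive counts; accelerationStructureSize sizes the backing buffer, while
  // buildScratchSize / updateScratchSize size the scratch buffer for the corresponding build mode.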

  class Buffer
  {
  public:
    using CType = VkBuffer;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;

  public:
    VULKAN_HPP_CONSTEXPR         Buffer() = default;
    VULKAN_HPP_CONSTEXPR         Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT : m_buffer( buffer ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Buffer & operator=( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT
    {
      m_buffer = buffer;
      return *this;
    }
#endif

    Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_buffer = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Buffer const & ) const = default;
#else
    bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_buffer == rhs.m_buffer;
    }

    bool operator!=( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_buffer != rhs.m_buffer;
    }

    bool operator<( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_buffer < rhs.m_buffer;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT
    {
      return m_buffer;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_buffer != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_buffer == VK_NULL_HANDLE;
    }

  private:
    VkBuffer m_buffer = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eBuffer>
  {
    using type = VULKAN_HPP_NAMESPACE::Buffer;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBuffer>
  {
    using Type = VULKAN_HPP_NAMESPACE::Buffer;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer>
  {
    using Type = VULKAN_HPP_NAMESPACE::Buffer;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Buffer>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
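
  // Note: Buffer is a thin, non-owning wrapper around VkBuffer; it compares like the underlying
  // handle and converts to bool to test against VK_NULL_HANDLE, e.g.
  //
  //   if ( buffer )   // equivalent to buffer != VK_NULL_HANDLE
  //   { /* use the handle */ }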

  struct AccelerationStructureCreateInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {},
                                          VULKAN_HPP_NAMESPACE::Buffer                              buffer_      = {},
                                          VULKAN_HPP_NAMESPACE::DeviceSize                          offset_      = {},
                                          VULKAN_HPP_NAMESPACE::DeviceSize                          size_        = {},
                                          VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR        type_ =
                                            VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel,
                                          VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
      : createFlags( createFlags_ )
      , buffer( buffer_ )
      , offset( offset_ )
      , size( size_ )
      , type( type_ )
      , deviceAddress( deviceAddress_ )
    {}

    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureCreateInfoKHR( *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR &
                            operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureCreateInfoKHR &
      operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureCreateInfoKHR &
      setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      createFlags = createFlags_;
      return *this;
    }

    AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    AccelerationStructureCreateInfoKHR &
      setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    AccelerationStructureCreateInfoKHR &
      setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( this );
    }

    operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) &&
             ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ) && ( type == rhs.type ) &&
             ( deviceAddress == rhs.deviceAddress );
    }

    bool operator!=( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {};
    VULKAN_HPP_NAMESPACE::Buffer                              buffer      = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                          offset      = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                          size        = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR        type =
      VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
  };
  static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
  {
    using Type = AccelerationStructureCreateInfoKHR;
  };
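
  // Illustrative usage of AccelerationStructureCreateInfoKHR above (not part of the generated
  // API): the acceleration structure is placed inside an existing vk::Buffer; size would normally
  // come from AccelerationStructureBuildSizesInfoKHR::accelerationStructureSize, and the spec
  // requires offset to be a multiple of 256. backingBuffer and buildSizesInfo are hypothetical.
  //
  //   vk::AccelerationStructureCreateInfoKHR createInfo;
  //   createInfo.setBuffer( backingBuffer )
  //             .setOffset( 0 )
  //             .setSize( buildSizesInfo.accelerationStructureSize )
  //             .setType( vk::AccelerationStructureTypeKHR::eBottomLevel );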

  struct GeometryTrianglesNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer     vertexData_      = {},
                           VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_    = {},
                           uint32_t                         vertexCount_     = {},
                           VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_    = {},
                           VULKAN_HPP_NAMESPACE::Format     vertexFormat_    = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                           VULKAN_HPP_NAMESPACE::Buffer     indexData_       = {},
                           VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_     = {},
                           uint32_t                         indexCount_      = {},
                           VULKAN_HPP_NAMESPACE::IndexType  indexType_       = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
                           VULKAN_HPP_NAMESPACE::Buffer     transformData_   = {},
                           VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : vertexData( vertexData_ )
      , vertexOffset( vertexOffset_ )
      , vertexCount( vertexCount_ )
      , vertexStride( vertexStride_ )
      , vertexFormat( vertexFormat_ )
      , indexData( indexData_ )
      , indexOffset( indexOffset_ )
      , indexCount( indexCount_ )
      , indexType( indexType_ )
      , transformData( transformData_ )
      , transformOffset( transformOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV &
                            operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>( &rhs );
      return *this;
    }

    GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexData = vertexData_;
      return *this;
    }

    GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexOffset = vertexOffset_;
      return *this;
    }

    GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexCount = vertexCount_;
      return *this;
    }

    GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexStride = vertexStride_;
      return *this;
    }

    GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexFormat = vertexFormat_;
      return *this;
    }

    GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
    {
      indexData = indexData_;
      return *this;
    }

    GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      indexOffset = indexOffset_;
      return *this;
    }

    GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      indexCount = indexCount_;
      return *this;
    }

    GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
    {
      transformData = transformData_;
      return *this;
    }

    GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      transformOffset = transformOffset_;
      return *this;
    }

    operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryTrianglesNV *>( this );
    }

    operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryTrianglesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeometryTrianglesNV const & ) const = default;
#else
    bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) &&
             ( vertexOffset == rhs.vertexOffset ) && ( vertexCount == rhs.vertexCount ) &&
             ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) &&
             ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) &&
             ( indexType == rhs.indexType ) && ( transformData == rhs.transformData ) &&
             ( transformOffset == rhs.transformOffset );
    }

    bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eGeometryTrianglesNV;
    const void *                        pNext           = {};
    VULKAN_HPP_NAMESPACE::Buffer        vertexData      = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    vertexOffset    = {};
    uint32_t                            vertexCount     = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    vertexStride    = {};
    VULKAN_HPP_NAMESPACE::Format        vertexFormat    = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::Buffer        indexData       = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    indexOffset     = {};
    uint32_t                            indexCount      = {};
    VULKAN_HPP_NAMESPACE::IndexType     indexType       = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
    VULKAN_HPP_NAMESPACE::Buffer        transformData   = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    transformOffset = {};
  };
  static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eGeometryTrianglesNV>
  {
    using Type = GeometryTrianglesNV;
  };
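
  // Illustrative usage of GeometryTrianglesNV above (not part of the generated API): the
  // VK_NV_ray_tracing path references vertex/index buffers by handle and offset rather than by
  // device address. Variable names are hypothetical.
  //
  //   vk::GeometryTrianglesNV triangles;
  //   triangles.setVertexData( vertexBuffer )
  //            .setVertexCount( vertexCount )
  //            .setVertexStride( sizeof( float ) * 3 )
  //            .setVertexFormat( vk::Format::eR32G32B32Sfloat )
  //            .setIndexData( indexBuffer )
  //            .setIndexCount( indexCount )
  //            .setIndexType( vk::IndexType::eUint32 );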

  struct GeometryAABBNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer     aabbData_ = {},
                                         uint32_t                         numAABBs_ = {},
                                         uint32_t                         stride_   = {},
                                         VULKAN_HPP_NAMESPACE::DeviceSize offset_   = {} ) VULKAN_HPP_NOEXCEPT
      : aabbData( aabbData_ )
      , numAABBs( numAABBs_ )
      , stride( stride_ )
      , offset( offset_ )
    {}

    VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs );
      return *this;
    }

    GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
    {
      aabbData = aabbData_;
      return *this;
    }

    GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
    {
      numAABBs = numAABBs_;
      return *this;
    }

    GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryAABBNV *>( this );
    }

    operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryAABBNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeometryAABBNV const & ) const = default;
#else
    bool operator==( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) &&
             ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) && ( offset == rhs.offset );
    }

    bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eGeometryAabbNV;
    const void *                        pNext    = {};
    VULKAN_HPP_NAMESPACE::Buffer        aabbData = {};
    uint32_t                            numAABBs = {};
    uint32_t                            stride   = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    offset   = {};
  };
  static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eGeometryAabbNV>
  {
    using Type = GeometryAABBNV;
  };

  struct GeometryDataNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {},
                                         VULKAN_HPP_NAMESPACE::GeometryAABBNV      aabbs_     = {} ) VULKAN_HPP_NOEXCEPT
      : triangles( triangles_ )
      , aabbs( aabbs_ )
    {}

    VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
      return *this;
    }

    GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
    {
      triangles = triangles_;
      return *this;
    }

    GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
    {
      aabbs = aabbs_;
      return *this;
    }

    operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryDataNV *>( this );
    }

    operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryDataNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeometryDataNV const & ) const = default;
#else
    bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs );
    }

    bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
    VULKAN_HPP_NAMESPACE::GeometryAABBNV      aabbs     = {};
  };
  static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" );

  struct GeometryNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GeometryNV(
      VULKAN_HPP_NAMESPACE::GeometryTypeKHR  geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles,
      VULKAN_HPP_NAMESPACE::GeometryDataNV   geometry_     = {},
      VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_        = {} ) VULKAN_HPP_NOEXCEPT
      : geometryType( geometryType_ )
      , geometry( geometry_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>( &rhs );
      return *this;
    }

    GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryType = geometryType_;
      return *this;
    }

    GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
    {
      geometry = geometry_;
      return *this;
    }

    GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeometryNV *>( this );
    }

    operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeometryNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeometryNV const & ) const = default;
#else
    bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) &&
             ( geometry == rhs.geometry ) && ( flags == rhs.flags );
    }

    bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType        = StructureType::eGeometryNV;
    const void *                           pNext        = {};
    VULKAN_HPP_NAMESPACE::GeometryTypeKHR  geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
    VULKAN_HPP_NAMESPACE::GeometryDataNV   geometry     = {};
    VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags        = {};
  };
  static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<GeometryNV>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eGeometryNV>
  {
    using Type = GeometryNV;
  };

  struct AccelerationStructureInfoNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV       type_          = {},
                                   VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_         = {},
                                   uint32_t                                                instanceCount_ = {},
                                   uint32_t                                                geometryCount_ = {},
                                   const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , flags( flags_ )
      , instanceCount( instanceCount_ )
      , geometryCount( geometryCount_ )
      , pGeometries( pGeometries_ )
    {}

    VULKAN_HPP_CONSTEXPR
      AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureInfoNV( *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    AccelerationStructureInfoNV(
      VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV                                             type_,
      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV                                       flags_,
      uint32_t                                                                                      instanceCount_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
      : type( type_ )
      , flags( flags_ )
      , instanceCount( instanceCount_ )
      , geometryCount( static_cast<uint32_t>( geometries_.size() ) )
      , pGeometries( geometries_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV &
                            operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>( &rhs );
      return *this;
    }

    AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    AccelerationStructureInfoNV &
      setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCount = instanceCount_;
      return *this;
    }

    AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryCount = geometryCount_;
      return *this;
    }

    AccelerationStructureInfoNV &
      setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ ) VULKAN_HPP_NOEXCEPT
    {
      pGeometries = pGeometries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    AccelerationStructureInfoNV & setGeometries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
      VULKAN_HPP_NOEXCEPT
    {
      geometryCount = static_cast<uint32_t>( geometries_.size() );
      pGeometries   = geometries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureInfoNV *>( this );
    }

    operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureInfoNV const & ) const = default;
#else
    bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) &&
             ( instanceCount == rhs.instanceCount ) && ( geometryCount == rhs.geometryCount ) &&
             ( pGeometries == rhs.pGeometries );
    }

    bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                     sType         = StructureType::eAccelerationStructureInfoNV;
    const void *                                            pNext         = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV       type          = {};
    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags         = {};
    uint32_t                                                instanceCount = {};
    uint32_t                                                geometryCount = {};
    const VULKAN_HPP_NAMESPACE::GeometryNV *                pGeometries   = {};
  };
  static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
  {
    using Type = AccelerationStructureInfoNV;
  };
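
  // Usage sketch for AccelerationStructureInfoNV, assuming VULKAN_HPP_DISABLE_ENHANCED_MODE and
  // VULKAN_HPP_NO_STRUCT_CONSTRUCTORS are left undefined and the GeometryNV contents are hypothetical:
  // the ArrayProxyNoTemporaries overload setGeometries() fills geometryCount and pGeometries together,
  // so the two members cannot drift out of sync.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::GeometryNV> geometries = /* application geometry */;
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info =
  //     VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV()
  //       .setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eBottomLevel )
  //       .setGeometries( geometries );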

  struct AccelerationStructureCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(
      VULKAN_HPP_NAMESPACE::DeviceSize                  compactedSize_ = {},
      VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_          = {} ) VULKAN_HPP_NOEXCEPT
      : compactedSize( compactedSize_ )
      , info( info_ )
    {}

    VULKAN_HPP_CONSTEXPR
      AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureCreateInfoNV( *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV &
                            operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>( &rhs );
      return *this;
    }

    AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureCreateInfoNV &
      setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
    {
      compactedSize = compactedSize_;
      return *this;
    }

    AccelerationStructureCreateInfoNV &
      setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
    {
      info = info_;
      return *this;
    }

    operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( this );
    }

    operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default;
#else
    bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) &&
             ( info == rhs.info );
    }

    bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType         = StructureType::eAccelerationStructureCreateInfoNV;
    const void *                                      pNext         = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  compactedSize = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info          = {};
  };
  static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
  {
    using Type = AccelerationStructureCreateInfoNV;
  };
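
  // Usage sketch for AccelerationStructureCreateInfoNV, reusing the `info` built in the sketch above:
  // compactedSize is left at zero unless the new acceleration structure is the target of a compacting
  // copy, in which case it holds the size queried from the source structure.
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV createInfo =
  //     VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV()
  //       .setCompactedSize( 0 )
  //       .setInfo( info );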

  struct AccelerationStructureDeviceAddressInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureDeviceAddressInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
      : accelerationStructure( accelerationStructure_ )
    {}

    VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(
      AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : AccelerationStructureDeviceAddressInfoKHR(
          *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR &
                            operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureDeviceAddressInfoKHR &
      operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure(
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }

    operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( this );
    }

    operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure );
    }

    bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
    const void *                                   pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
  };
  static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) ==
                   sizeof( VkAccelerationStructureDeviceAddressInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureDeviceAddressInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
  {
    using Type = AccelerationStructureDeviceAddressInfoKHR;
  };
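
  // Usage sketch for AccelerationStructureDeviceAddressInfoKHR, assuming `accelerationStructure` is a
  // previously created handle: this is the input structure of vkGetAccelerationStructureDeviceAddressKHR,
  // which returns the 64-bit device address used to reference the acceleration structure in instance data.
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR addressInfo =
  //     VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR()
  //       .setAccelerationStructure( accelerationStructure );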

  struct TransformMatrixKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      TransformMatrixKHR( std::array<std::array<float, 4>, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT
      : matrix( matrix_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR &
                            operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
      return *this;
    }

    TransformMatrixKHR & setMatrix( std::array<std::array<float, 4>, 3> matrix_ ) VULKAN_HPP_NOEXCEPT
    {
      matrix = matrix_;
      return *this;
    }

    operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTransformMatrixKHR *>( this );
    }

    operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTransformMatrixKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TransformMatrixKHR const & ) const = default;
#else
    bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( matrix == rhs.matrix );
    }

    bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
  };
  static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
  using TransformMatrixNV = TransformMatrixKHR;
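
  // Usage sketch for TransformMatrixKHR: the wrapper stores a row-major 3x4 matrix, i.e. the upper three
  // rows of a 4x4 affine transform. Assuming struct constructors are enabled, an identity transform can
  // be written as a nested std::array:
  //
  //   VULKAN_HPP_NAMESPACE::TransformMatrixKHR identity( { { { 1.0f, 0.0f, 0.0f, 0.0f },
  //                                                          { 0.0f, 1.0f, 0.0f, 0.0f },
  //                                                          { 0.0f, 0.0f, 1.0f, 0.0f } } } );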

  struct AccelerationStructureInstanceKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_           = {},
                                        uint32_t                                 instanceCustomIndex_ = {},
                                        uint32_t                                 mask_                = {},
                                        uint32_t instanceShaderBindingTableRecordOffset_              = {},
                                        VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_         = {},
                                        uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT
      : transform( transform_ )
      , instanceCustomIndex( instanceCustomIndex_ )
      , mask( mask_ )
      , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ )
      , flags( flags_ )
      , accelerationStructureReference( accelerationStructureReference_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureInstanceKHR( *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR &
                            operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureInstanceKHR &
      setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCustomIndex = instanceCustomIndex_;
      return *this;
    }

    AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
    {
      mask = mask_;
      return *this;
    }

    AccelerationStructureInstanceKHR &
      setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
      return *this;
    }

    AccelerationStructureInstanceKHR &
      setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR *>( &flags_ );
      return *this;
    }

    AccelerationStructureInstanceKHR &
      setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureReference = accelerationStructureReference_;
      return *this;
    }

    operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureInstanceKHR *>( this );
    }

    operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureInstanceKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) &&
             ( mask == rhs.mask ) &&
             ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) &&
             ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference );
    }

    bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {};
    uint32_t                                 instanceCustomIndex : 24;
    uint32_t                                 mask : 8;
    uint32_t                                 instanceShaderBindingTableRecordOffset : 24;
    VkGeometryInstanceFlagsKHR               flags : 8;
    uint64_t                                 accelerationStructureReference = {};
  };
  static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureInstanceKHR>::value,
                 "struct wrapper is not a standard layout!" );
  using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
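
  // Usage sketch for AccelerationStructureInstanceKHR, with `identity` from the TransformMatrixKHR sketch
  // above and `blasAddress` a hypothetical 64-bit device address of a bottom-level acceleration structure:
  // instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset and flags are packed bit-fields, so
  // the chained setters are the convenient way to fill them.
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR instance =
  //     VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR()
  //       .setTransform( identity )
  //       .setInstanceCustomIndex( 0 )
  //       .setMask( 0xFF )
  //       .setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
  //       .setAccelerationStructureReference( blasAddress );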

  class AccelerationStructureNV
  {
  public:
    using CType = VkAccelerationStructureNV;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;

  public:
    VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default;
    VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT
      AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
      : m_accelerationStructureNV( accelerationStructureNV )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    AccelerationStructureNV & operator=( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
    {
      m_accelerationStructureNV = accelerationStructureNV;
      return *this;
    }
#endif

    AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_accelerationStructureNV = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureNV const & ) const = default;
#else
    bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureNV == rhs.m_accelerationStructureNV;
    }

    bool operator!=( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureNV != rhs.m_accelerationStructureNV;
    }

    bool operator<( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureNV < rhs.m_accelerationStructureNV;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureNV;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureNV != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_accelerationStructureNV == VK_NULL_HANDLE;
    }

  private:
    VkAccelerationStructureNV m_accelerationStructureNV = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eAccelerationStructureNV>
  {
    using type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV>
  {
    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV>
  {
    using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
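
  // Handle wrappers such as AccelerationStructureNV are thin value types over the corresponding C handle:
  // they default-construct to a null handle, can be tested in boolean context, reset by assigning nullptr,
  // and converted back to the C handle (explicitly when VULKAN_HPP_TYPESAFE_CONVERSION is defined, which
  // is the 64-bit default):
  //
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;   // null handle
  //   if ( !accelerationStructure )
  //   {
  //     /* not created yet */
  //   }
  //   VkAccelerationStructureNV raw = static_cast<VkAccelerationStructureNV>( accelerationStructure );
  //   accelerationStructure         = nullptr;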

  struct AccelerationStructureMemoryRequirementsInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureMemoryRequirementsInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(
      VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ =
        VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject,
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , accelerationStructure( accelerationStructure_ )
    {}

    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(
      AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : AccelerationStructureMemoryRequirementsInfoNV(
          *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV &
                            operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureMemoryRequirementsInfoNV &
      operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
      return *this;
    }

    AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureMemoryRequirementsInfoNV &
      setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure(
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }

    operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
    }

    operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default;
#else
    bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) &&
             ( accelerationStructure == rhs.accelerationStructure );
    }

    bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type =
      VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
  };
  static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) ==
                   sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
  {
    using Type = AccelerationStructureMemoryRequirementsInfoNV;
  };

  struct AccelerationStructureVersionInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAccelerationStructureVersionInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {} ) VULKAN_HPP_NOEXCEPT
      : pVersionData( pVersionData_ )
    {}

    VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AccelerationStructureVersionInfoKHR( *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR &
                            operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AccelerationStructureVersionInfoKHR &
      operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const *>( &rhs );
      return *this;
    }

    AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
    {
      pVersionData = pVersionData_;
      return *this;
    }

    operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( this );
    }

    operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default;
#else
    bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
    }

    bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eAccelerationStructureVersionInfoKHR;
    const void *                        pNext        = {};
    const uint8_t *                     pVersionData = {};
  };
  static_assert( sizeof( AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AccelerationStructureVersionInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAccelerationStructureVersionInfoKHR>
  {
    using Type = AccelerationStructureVersionInfoKHR;
  };
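
  // Usage sketch for AccelerationStructureVersionInfoKHR, with `versionData` a hypothetical pointer:
  // pVersionData refers to 2 * VK_UUID_SIZE bytes of version information (the header of a serialized
  // acceleration structure), as consumed by vkGetDeviceAccelerationStructureCompatibilityKHR to check
  // whether serialized data is compatible with the current device.
  //
  //   const uint8_t * versionData = /* header bytes of a serialized acceleration structure */;
  //   VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR versionInfo =
  //     VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR()
  //       .setPVersionData( versionData );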

  class SwapchainKHR
  {
  public:
    using CType = VkSwapchainKHR;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;

  public:
    VULKAN_HPP_CONSTEXPR         SwapchainKHR() = default;
    VULKAN_HPP_CONSTEXPR         SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
      : m_swapchainKHR( swapchainKHR )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
    {
      m_swapchainKHR = swapchainKHR;
      return *this;
    }
#endif

    SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_swapchainKHR = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainKHR const & ) const = default;
#else
    bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_swapchainKHR == rhs.m_swapchainKHR;
    }

    bool operator!=( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_swapchainKHR != rhs.m_swapchainKHR;
    }

    bool operator<( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_swapchainKHR < rhs.m_swapchainKHR;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT
    {
      return m_swapchainKHR;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_swapchainKHR != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_swapchainKHR == VK_NULL_HANDLE;
    }

  private:
    VkSwapchainKHR m_swapchainKHR = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSwapchainKHR>
  {
    using type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SwapchainKHR>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  class Semaphore
  {
  public:
    using CType = VkSemaphore;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;

  public:
    VULKAN_HPP_CONSTEXPR         Semaphore() = default;
    VULKAN_HPP_CONSTEXPR         Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT : m_semaphore( semaphore ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Semaphore & operator=( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT
    {
      m_semaphore = semaphore;
      return *this;
    }
#endif

    Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_semaphore = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Semaphore const & ) const = default;
#else
    bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_semaphore == rhs.m_semaphore;
    }

    bool operator!=( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_semaphore != rhs.m_semaphore;
    }

    bool operator<( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_semaphore < rhs.m_semaphore;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT
    {
      return m_semaphore;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_semaphore != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_semaphore == VK_NULL_HANDLE;
    }

  private:
    VkSemaphore m_semaphore = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSemaphore>
  {
    using type = VULKAN_HPP_NAMESPACE::Semaphore;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore>
  {
    using Type = VULKAN_HPP_NAMESPACE::Semaphore;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore>
  {
    using Type = VULKAN_HPP_NAMESPACE::Semaphore;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Semaphore>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  class Fence
  {
  public:
    using CType = VkFence;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eFence;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;

  public:
    VULKAN_HPP_CONSTEXPR         Fence() = default;
    VULKAN_HPP_CONSTEXPR         Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT : m_fence( fence ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Fence & operator=( VkFence fence ) VULKAN_HPP_NOEXCEPT
    {
      m_fence = fence;
      return *this;
    }
#endif

    Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_fence = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Fence const & ) const = default;
#else
    bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_fence == rhs.m_fence;
    }

    bool operator!=( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_fence != rhs.m_fence;
    }

    bool operator<( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_fence < rhs.m_fence;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT
    {
      return m_fence;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_fence != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_fence == VK_NULL_HANDLE;
    }

  private:
    VkFence m_fence = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eFence>
  {
    using type = VULKAN_HPP_NAMESPACE::Fence;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFence>
  {
    using Type = VULKAN_HPP_NAMESPACE::Fence;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence>
  {
    using Type = VULKAN_HPP_NAMESPACE::Fence;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Fence>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct AcquireNextImageInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
                                                  uint64_t                           timeout_   = {},
                                                  VULKAN_HPP_NAMESPACE::Semaphore    semaphore_ = {},
                                                  VULKAN_HPP_NAMESPACE::Fence        fence_     = {},
                                                  uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : swapchain( swapchain_ )
      , timeout( timeout_ )
      , semaphore( semaphore_ )
      , fence( fence_ )
      , deviceMask( deviceMask_ )
    {}

    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR &
                            operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
      return *this;
    }

    AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }

    AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
    {
      timeout = timeout_;
      return *this;
    }

    AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }

    operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAcquireNextImageInfoKHR *>( this );
    }

    operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAcquireNextImageInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AcquireNextImageInfoKHR const & ) const = default;
#else
    bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) &&
             ( timeout == rhs.timeout ) && ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) &&
             ( deviceMask == rhs.deviceMask );
    }

    bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::eAcquireNextImageInfoKHR;
    const void *                        pNext      = {};
    VULKAN_HPP_NAMESPACE::SwapchainKHR  swapchain  = {};
    uint64_t                            timeout    = {};
    VULKAN_HPP_NAMESPACE::Semaphore     semaphore  = {};
    VULKAN_HPP_NAMESPACE::Fence         fence      = {};
    uint32_t                            deviceMask = {};
  };
  static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
  {
    using Type = AcquireNextImageInfoKHR;
  };
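
  // Usage sketch for AcquireNextImageInfoKHR, assuming `swapchain` and `imageAvailableSemaphore` were
  // created earlier: this is the device-group aware variant of swapchain image acquisition, consumed by
  // vkAcquireNextImage2KHR; deviceMask indicates on which physical devices of the group the acquired
  // image will be used.
  //
  //   VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR acquireInfo =
  //     VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR()
  //       .setSwapchain( swapchain )
  //       .setTimeout( UINT64_MAX )
  //       .setSemaphore( imageAvailableSemaphore )
  //       .setDeviceMask( 1 );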

  struct AcquireProfilingLockInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {},
                                                      uint64_t timeout_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , timeout( timeout_ )
    {}

    VULKAN_HPP_CONSTEXPR
      AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AcquireProfilingLockInfoKHR( *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR &
                            operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>( &rhs );
      return *this;
    }

    AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AcquireProfilingLockInfoKHR &
      setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
    {
      timeout = timeout_;
      return *this;
    }

    operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( this );
    }

    operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAcquireProfilingLockInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default;
#else
    bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout );
    }

    bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType   = StructureType::eAcquireProfilingLockInfoKHR;
    const void *                                       pNext   = {};
    VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags   = {};
    uint64_t                                           timeout = {};
  };
  static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AcquireProfilingLockInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
  {
    using Type = AcquireProfilingLockInfoKHR;
  };

  struct AllocationCallbacks
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AllocationCallbacks( void *                               pUserData_             = {},
                                              PFN_vkAllocationFunction             pfnAllocation_         = {},
                                              PFN_vkReallocationFunction           pfnReallocation_       = {},
                                              PFN_vkFreeFunction                   pfnFree_               = {},
                                              PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {},
                                              PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT
      : pUserData( pUserData_ )
      , pfnAllocation( pfnAllocation_ )
      , pfnReallocation( pfnReallocation_ )
      , pfnFree( pfnFree_ )
      , pfnInternalAllocation( pfnInternalAllocation_ )
      , pfnInternalFree( pfnInternalFree_ )
    {}

    VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
      : AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks &
                            operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>( &rhs );
      return *this;
    }

    AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

    AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnAllocation = pfnAllocation_;
      return *this;
    }

    AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnReallocation = pfnReallocation_;
      return *this;
    }

    AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnFree = pfnFree_;
      return *this;
    }

    AllocationCallbacks &
      setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnInternalAllocation = pfnInternalAllocation_;
      return *this;
    }

    AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnInternalFree = pfnInternalFree_;
      return *this;
    }

    operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAllocationCallbacks *>( this );
    }

    operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAllocationCallbacks *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AllocationCallbacks const & ) const = default;
#else
    bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) &&
             ( pfnReallocation == rhs.pfnReallocation ) && ( pfnFree == rhs.pfnFree ) &&
             ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree );
    }

    bool operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    void *                               pUserData             = {};
    PFN_vkAllocationFunction             pfnAllocation         = {};
    PFN_vkReallocationFunction           pfnReallocation       = {};
    PFN_vkFreeFunction                   pfnFree               = {};
    PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
    PFN_vkInternalFreeNotification       pfnInternalFree       = {};
  };
  static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
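
  // Usage sketch for AllocationCallbacks, with myAllocatorState, myAlloc, myRealloc and myFree being
  // hypothetical application hooks matching the PFN_vkAllocationFunction / PFN_vkReallocationFunction /
  // PFN_vkFreeFunction signatures: the resulting object can be passed wherever the API accepts an
  // optional allocator in order to route host allocations through the application.
  //
  //   VULKAN_HPP_NAMESPACE::AllocationCallbacks allocator =
  //     VULKAN_HPP_NAMESPACE::AllocationCallbacks()
  //       .setPUserData( &myAllocatorState )
  //       .setPfnAllocation( &myAlloc )
  //       .setPfnReallocation( &myRealloc )
  //       .setPfnFree( &myFree );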

  struct ComponentMapping
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
                        VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
                        VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
                        VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity )
        VULKAN_HPP_NOEXCEPT
      : r( r_ )
      , g( g_ )
      , b( b_ )
      , a( a_ )
    {}

    VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
      : ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
      return *this;
    }

    ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
    {
      r = r_;
      return *this;
    }

    ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
    {
      g = g_;
      return *this;
    }

    ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
    {
      b = b_;
      return *this;
    }

    ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
    {
      a = a_;
      return *this;
    }

    operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkComponentMapping *>( this );
    }

    operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkComponentMapping *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ComponentMapping const & ) const = default;
#else
    bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a );
    }

    bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
    VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
    VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
    VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
  };
  static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ComponentMapping>::value, "struct wrapper is not a standard layout!" );
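
  // Usage sketch for ComponentMapping: all four swizzles default to eIdentity, so only the components
  // that actually need remapping have to be set. Swapping the red and blue channels of an image view,
  // for example, takes two setters:
  //
  //   VULKAN_HPP_NAMESPACE::ComponentMapping swizzle =
  //     VULKAN_HPP_NAMESPACE::ComponentMapping()
  //       .setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle::eB )
  //       .setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle::eR );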

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  struct AndroidHardwareBufferFormatPropertiesANDROID
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(
      VULKAN_HPP_NAMESPACE::Format                      format_         = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      uint64_t                                          externalFormat_ = {},
      VULKAN_HPP_NAMESPACE::FormatFeatureFlags          formatFeatures_ = {},
      VULKAN_HPP_NAMESPACE::ComponentMapping            samplerYcbcrConversionComponents_ = {},
      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ =
        VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
      VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
      VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
      VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ =
        VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT
      : format( format_ )
      , externalFormat( externalFormat_ )
      , formatFeatures( formatFeatures_ )
      , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
      , suggestedYcbcrModel( suggestedYcbcrModel_ )
      , suggestedYcbcrRange( suggestedYcbcrRange_ )
      , suggestedXChromaOffset( suggestedXChromaOffset_ )
      , suggestedYChromaOffset( suggestedYChromaOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(
      AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs )
      VULKAN_HPP_NOEXCEPT
      : AndroidHardwareBufferFormatPropertiesANDROID(
          *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferFormatPropertiesANDROID &
                            operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidHardwareBufferFormatPropertiesANDROID &
      operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
      return *this;
    }

    operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default;
#  else
    bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) &&
             ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) &&
             ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) &&
             ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) &&
             ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) &&
             ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
    }

    bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType  = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
    void *                                   pNext  = {};
    VULKAN_HPP_NAMESPACE::Format             format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    uint64_t                                 externalFormat                   = {};
    VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures                   = {};
    VULKAN_HPP_NAMESPACE::ComponentMapping   samplerYcbcrConversionComponents = {};
    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel =
      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange    = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
    VULKAN_HPP_NAMESPACE::ChromaLocation    suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
    VULKAN_HPP_NAMESPACE::ChromaLocation    suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
  };
  static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) ==
                   sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AndroidHardwareBufferFormatPropertiesANDROID>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
  {
    using Type = AndroidHardwareBufferFormatPropertiesANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  struct AndroidHardwareBufferPropertiesANDROID
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAndroidHardwareBufferPropertiesANDROID;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
                                                                 uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : allocationSize( allocationSize_ )
      , memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : AndroidHardwareBufferPropertiesANDROID(
          *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferPropertiesANDROID &
                            operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidHardwareBufferPropertiesANDROID &
      operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
      return *this;
    }

    operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID *>( this );
    }

    operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default;
#  else
    bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) &&
             ( memoryTypeBits == rhs.memoryTypeBits );
    }

    bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eAndroidHardwareBufferPropertiesANDROID;
    void *                              pNext          = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    allocationSize = {};
    uint32_t                            memoryTypeBits = {};
  };
  static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AndroidHardwareBufferPropertiesANDROID>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
  {
    using Type = AndroidHardwareBufferPropertiesANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
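  // Illustrative usage, not part of the generated header: AndroidHardwareBufferFormatPropertiesANDROID is
  // normally chained into the pNext of AndroidHardwareBufferPropertiesANDROID before querying an
  // AHardwareBuffer, so both structures are filled by a single query. The variable names below are
  // placeholders.
  //
  //   VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID formatProperties;
  //   VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID       properties;
  //   properties.pNext = &formatProperties;
  //   // 'properties' is convertible to VkAndroidHardwareBufferPropertiesANDROID via the conversion
  //   // operators above and can then be passed to the device-level query for the buffer.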

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  struct AndroidHardwareBufferUsageANDROID
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAndroidHardwareBufferUsageANDROID;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT
      : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
    {}

    VULKAN_HPP_CONSTEXPR
      AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : AndroidHardwareBufferUsageANDROID( *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferUsageANDROID &
                            operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>( &rhs );
      return *this;
    }

    operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID *>( this );
    }

    operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default;
#  else
    bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
    }

    bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                      = StructureType::eAndroidHardwareBufferUsageANDROID;
    void *                              pNext                      = {};
    uint64_t                            androidHardwareBufferUsage = {};
  };
  static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AndroidHardwareBufferUsageANDROID>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
  {
    using Type = AndroidHardwareBufferUsageANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  struct AndroidSurfaceCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {},
                                                      struct ANativeWindow * window_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , window( window_ )
    {}

    VULKAN_HPP_CONSTEXPR
      AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : AndroidSurfaceCreateInfoKHR( *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR &
                            operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

    AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AndroidSurfaceCreateInfoKHR &
      setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }

    operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( this );
    }

    operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default;
#  else
    bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window );
    }

    bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType  = StructureType::eAndroidSurfaceCreateInfoKHR;
    const void *                                       pNext  = {};
    VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags  = {};
    struct ANativeWindow *                             window = {};
  };
  static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AndroidSurfaceCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
  {
    using Type = AndroidSurfaceCreateInfoKHR;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
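  // Illustrative usage, not part of the generated header: a minimal sketch of filling
  // AndroidSurfaceCreateInfoKHR with the chained setters above; 'nativeWindow' is a placeholder for an
  // ANativeWindow* obtained on the Android side.
  //
  //   VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR surfaceCreateInfo =
  //     VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR().setWindow( nativeWindow );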

  struct ApplicationInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_   = {},
                                          uint32_t     applicationVersion_ = {},
                                          const char * pEngineName_        = {},
                                          uint32_t     engineVersion_      = {},
                                          uint32_t     apiVersion_         = {} ) VULKAN_HPP_NOEXCEPT
      : pApplicationName( pApplicationName_ )
      , applicationVersion( applicationVersion_ )
      , pEngineName( pEngineName_ )
      , engineVersion( engineVersion_ )
      , apiVersion( apiVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>( &rhs );
      return *this;
    }

    ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) VULKAN_HPP_NOEXCEPT
    {
      pApplicationName = pApplicationName_;
      return *this;
    }

    ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      applicationVersion = applicationVersion_;
      return *this;
    }

    ApplicationInfo & setPEngineName( const char * pEngineName_ ) VULKAN_HPP_NOEXCEPT
    {
      pEngineName = pEngineName_;
      return *this;
    }

    ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      engineVersion = engineVersion_;
      return *this;
    }

    ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      apiVersion = apiVersion_;
      return *this;
    }

    operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkApplicationInfo *>( this );
    }

    operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkApplicationInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ApplicationInfo const & ) const = default;
#else
    bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pApplicationName == rhs.pApplicationName ) &&
             ( applicationVersion == rhs.applicationVersion ) && ( pEngineName == rhs.pEngineName ) &&
             ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion );
    }

    bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType              = StructureType::eApplicationInfo;
    const void *                        pNext              = {};
    const char *                        pApplicationName   = {};
    uint32_t                            applicationVersion = {};
    const char *                        pEngineName        = {};
    uint32_t                            engineVersion      = {};
    uint32_t                            apiVersion         = {};
  };
  static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ApplicationInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eApplicationInfo>
  {
    using Type = ApplicationInfo;
  };
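  // Illustrative usage, not part of the generated header: ApplicationInfo is commonly built with the
  // chained setters above; the application/engine names and versions here are placeholders.
  //
  //   VULKAN_HPP_NAMESPACE::ApplicationInfo applicationInfo =
  //     VULKAN_HPP_NAMESPACE::ApplicationInfo()
  //       .setPApplicationName( "MyApp" )
  //       .setApplicationVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
  //       .setPEngineName( "MyEngine" )
  //       .setEngineVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
  //       .setApiVersion( VK_API_VERSION_1_0 );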

  struct AttachmentDescription
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AttachmentDescription(
      VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_         = {},
      VULKAN_HPP_NAMESPACE::Format                     format_        = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits        samples_       = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
      VULKAN_HPP_NAMESPACE::AttachmentLoadOp           loadOp_        = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
      VULKAN_HPP_NAMESPACE::AttachmentStoreOp          storeOp_       = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
      VULKAN_HPP_NAMESPACE::AttachmentLoadOp           stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
      VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_         = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
      VULKAN_HPP_NAMESPACE::ImageLayout       initialLayout_          = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      VULKAN_HPP_NAMESPACE::ImageLayout       finalLayout_            = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
      VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , format( format_ )
      , samples( samples_ )
      , loadOp( loadOp_ )
      , storeOp( storeOp_ )
      , stencilLoadOp( stencilLoadOp_ )
      , stencilStoreOp( stencilStoreOp_ )
      , initialLayout( initialLayout_ )
      , finalLayout( finalLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription &
                            operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
      return *this;
    }

    AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
    {
      samples = samples_;
      return *this;
    }

    AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
    {
      loadOp = loadOp_;
      return *this;
    }

    AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
    {
      storeOp = storeOp_;
      return *this;
    }

    AttachmentDescription &
      setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilLoadOp = stencilLoadOp_;
      return *this;
    }

    AttachmentDescription &
      setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilStoreOp = stencilStoreOp_;
      return *this;
    }

    AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      initialLayout = initialLayout_;
      return *this;
    }

    AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      finalLayout = finalLayout_;
      return *this;
    }

    operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentDescription *>( this );
    }

    operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentDescription *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentDescription const & ) const = default;
#else
    bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) &&
             ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) &&
             ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) &&
             ( finalLayout == rhs.finalLayout );
    }

    bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags          = {};
    VULKAN_HPP_NAMESPACE::Format                     format         = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits        samples        = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::AttachmentLoadOp           loadOp         = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
    VULKAN_HPP_NAMESPACE::AttachmentStoreOp          storeOp        = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
    VULKAN_HPP_NAMESPACE::AttachmentLoadOp           stencilLoadOp  = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
    VULKAN_HPP_NAMESPACE::AttachmentStoreOp          stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
    VULKAN_HPP_NAMESPACE::ImageLayout                initialLayout  = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageLayout                finalLayout    = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  };
  static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
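  // Illustrative usage, not part of the generated header: a typical color attachment that is cleared on
  // load and transitioned for presentation at the end of the render pass. The format and layouts are
  // placeholders chosen for the example.
  //
  //   VULKAN_HPP_NAMESPACE::AttachmentDescription colorAttachment =
  //     VULKAN_HPP_NAMESPACE::AttachmentDescription()
  //       .setFormat( VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm )
  //       .setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 )
  //       .setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eClear )
  //       .setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore )
  //       .setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eDontCare )
  //       .setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eDontCare )
  //       .setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
  //       .setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout::ePresentSrcKHR );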

  struct AttachmentDescription2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AttachmentDescription2(
      VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_         = {},
      VULKAN_HPP_NAMESPACE::Format                     format_        = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits        samples_       = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
      VULKAN_HPP_NAMESPACE::AttachmentLoadOp           loadOp_        = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
      VULKAN_HPP_NAMESPACE::AttachmentStoreOp          storeOp_       = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
      VULKAN_HPP_NAMESPACE::AttachmentLoadOp           stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad,
      VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_         = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore,
      VULKAN_HPP_NAMESPACE::ImageLayout       initialLayout_          = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      VULKAN_HPP_NAMESPACE::ImageLayout       finalLayout_            = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
      VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , format( format_ )
      , samples( samples_ )
      , loadOp( loadOp_ )
      , storeOp( storeOp_ )
      , stencilLoadOp( stencilLoadOp_ )
      , stencilStoreOp( stencilStoreOp_ )
      , initialLayout( initialLayout_ )
      , finalLayout( finalLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentDescription2( *reinterpret_cast<AttachmentDescription2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 &
                            operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>( &rhs );
      return *this;
    }

    AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
    {
      samples = samples_;
      return *this;
    }

    AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
    {
      loadOp = loadOp_;
      return *this;
    }

    AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
    {
      storeOp = storeOp_;
      return *this;
    }

    AttachmentDescription2 &
      setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilLoadOp = stencilLoadOp_;
      return *this;
    }

    AttachmentDescription2 &
      setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilStoreOp = stencilStoreOp_;
      return *this;
    }

    AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      initialLayout = initialLayout_;
      return *this;
    }

    AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      finalLayout = finalLayout_;
      return *this;
    }

    operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentDescription2 *>( this );
    }

    operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentDescription2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentDescription2 const & ) const = default;
#else
    bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) &&
             ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) &&
             ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) &&
             ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout );
    }

    bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType          = StructureType::eAttachmentDescription2;
    const void *                                     pNext          = {};
    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags          = {};
    VULKAN_HPP_NAMESPACE::Format                     format         = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits        samples        = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::AttachmentLoadOp           loadOp         = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
    VULKAN_HPP_NAMESPACE::AttachmentStoreOp          storeOp        = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
    VULKAN_HPP_NAMESPACE::AttachmentLoadOp           stencilLoadOp  = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
    VULKAN_HPP_NAMESPACE::AttachmentStoreOp          stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
    VULKAN_HPP_NAMESPACE::ImageLayout                initialLayout  = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageLayout                finalLayout    = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  };
  static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAttachmentDescription2>
  {
    using Type = AttachmentDescription2;
  };
  using AttachmentDescription2KHR = AttachmentDescription2;

  struct AttachmentDescriptionStencilLayout
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eAttachmentDescriptionStencilLayout;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(
      VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_   = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
      VULKAN_HPP_NOEXCEPT
      : stencilInitialLayout( stencilInitialLayout_ )
      , stencilFinalLayout( stencilFinalLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentDescriptionStencilLayout( *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout &
                            operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentDescriptionStencilLayout &
      operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
      return *this;
    }

    AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AttachmentDescriptionStencilLayout &
      setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilInitialLayout = stencilInitialLayout_;
      return *this;
    }

    AttachmentDescriptionStencilLayout &
      setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilFinalLayout = stencilFinalLayout_;
      return *this;
    }

    operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout *>( this );
    }

    operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentDescriptionStencilLayout *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default;
#else
    bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) &&
             ( stencilFinalLayout == rhs.stencilFinalLayout );
    }

    bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                = StructureType::eAttachmentDescriptionStencilLayout;
    void *                              pNext                = {};
    VULKAN_HPP_NAMESPACE::ImageLayout   stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageLayout   stencilFinalLayout   = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  };
  static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AttachmentDescriptionStencilLayout>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
  {
    using Type = AttachmentDescriptionStencilLayout;
  };
  using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
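  // Illustrative usage, not part of the generated header: AttachmentDescriptionStencilLayout can be chained
  // into the pNext of an AttachmentDescription2 to give the stencil aspect its own layouts (separate
  // depth/stencil layouts). The format and layouts below are placeholders.
  //
  //   VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout stencilLayouts =
  //     VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout()
  //       .setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
  //       .setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout::eStencilAttachmentOptimal );
  //   VULKAN_HPP_NAMESPACE::AttachmentDescription2 depthAttachment =
  //     VULKAN_HPP_NAMESPACE::AttachmentDescription2()
  //       .setPNext( &stencilLayouts )
  //       .setFormat( VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint )
  //       .setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout::eDepthAttachmentOptimal );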

  struct AttachmentReference
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AttachmentReference(
      uint32_t                          attachment_ = {},
      VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
      : attachment( attachment_ )
      , layout( layout_ )
    {}

    VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AttachmentReference &
                            operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
      return *this;
    }

    AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
    {
      attachment = attachment_;
      return *this;
    }

    AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentReference *>( this );
    }

    operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentReference *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentReference const & ) const = default;
#else
    bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( attachment == rhs.attachment ) && ( layout == rhs.layout );
    }

    bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                          attachment = {};
    VULKAN_HPP_NAMESPACE::ImageLayout layout     = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  };
  static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AttachmentReference>::value, "struct wrapper is not a standard layout!" );

  struct AttachmentReference2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      AttachmentReference2( uint32_t                          attachment_ = {},
                            VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                            VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : attachment( attachment_ )
      , layout( layout_ )
      , aspectMask( aspectMask_ )
    {}

    VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 &
                            operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
      return *this;
    }

    AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
    {
      attachment = attachment_;
      return *this;
    }

    AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentReference2 *>( this );
    }

    operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentReference2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentReference2 const & ) const = default;
#else
    bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) &&
             ( layout == rhs.layout ) && ( aspectMask == rhs.aspectMask );
    }

    bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType      = StructureType::eAttachmentReference2;
    const void *                           pNext      = {};
    uint32_t                               attachment = {};
    VULKAN_HPP_NAMESPACE::ImageLayout      layout     = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
  };
  static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AttachmentReference2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAttachmentReference2>
  {
    using Type = AttachmentReference2;
  };
  using AttachmentReference2KHR = AttachmentReference2;
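  // Illustrative usage, not part of the generated header: an AttachmentReference2 for a combined
  // depth/stencil attachment with an explicit aspect mask. The attachment index and layout are placeholders.
  //
  //   VULKAN_HPP_NAMESPACE::AttachmentReference2 depthStencilRef =
  //     VULKAN_HPP_NAMESPACE::AttachmentReference2()
  //       .setAttachment( 1 )
  //       .setLayout( VULKAN_HPP_NAMESPACE::ImageLayout::eDepthStencilAttachmentOptimal )
  //       .setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eDepth |
  //                       VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eStencil );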

  struct AttachmentReferenceStencilLayout
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ =
                                          VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
      : stencilLayout( stencilLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR
      AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentReferenceStencilLayout( *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout &
                            operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>( &rhs );
      return *this;
    }

    AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    AttachmentReferenceStencilLayout &
      setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilLayout = stencilLayout_;
      return *this;
    }

    operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentReferenceStencilLayout *>( this );
    }

    operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentReferenceStencilLayout *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default;
#else
    bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout );
    }

    bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eAttachmentReferenceStencilLayout;
    void *                              pNext         = {};
    VULKAN_HPP_NAMESPACE::ImageLayout   stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  };
  static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AttachmentReferenceStencilLayout>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
  {
    using Type = AttachmentReferenceStencilLayout;
  };
  using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;

  struct Extent2D
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT
      : width( width_ )
      , height( height_ )
    {}

    VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
      return *this;
    }

    Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExtent2D *>( this );
    }

    operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExtent2D *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Extent2D const & ) const = default;
#else
    bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( width == rhs.width ) && ( height == rhs.height );
    }

    bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t width  = {};
    uint32_t height = {};
  };
  static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Extent2D>::value, "struct wrapper is not a standard layout!" );

  struct SampleLocationEXT
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
    {}

    VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT &
                            operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
      return *this;
    }

    SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSampleLocationEXT *>( this );
    }

    operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSampleLocationEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SampleLocationEXT const & ) const = default;
#else
    bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( x == rhs.x ) && ( y == rhs.y );
    }

    bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float x = {};
    float y = {};
  };
  static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );

  struct SampleLocationsInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ =
        VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
      VULKAN_HPP_NAMESPACE::Extent2D                  sampleLocationGridSize_ = {},
      uint32_t                                        sampleLocationsCount_   = {},
      const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_       = {} ) VULKAN_HPP_NOEXCEPT
      : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
      , sampleLocationGridSize( sampleLocationGridSize_ )
      , sampleLocationsCount( sampleLocationsCount_ )
      , pSampleLocations( pSampleLocations_ )
    {}

    VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SampleLocationsInfoEXT(
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_,
      VULKAN_HPP_NAMESPACE::Extent2D            sampleLocationGridSize_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const &
        sampleLocations_ )
      : sampleLocationsPerPixel( sampleLocationsPerPixel_ )
      , sampleLocationGridSize( sampleLocationGridSize_ )
      , sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) )
      , pSampleLocations( sampleLocations_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT &
                            operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs );
      return *this;
    }

    SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SampleLocationsInfoEXT & setSampleLocationsPerPixel(
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsPerPixel = sampleLocationsPerPixel_;
      return *this;
    }

    SampleLocationsInfoEXT &
      setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationGridSize = sampleLocationGridSize_;
      return *this;
    }

    SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsCount = sampleLocationsCount_;
      return *this;
    }

    SampleLocationsInfoEXT &
      setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampleLocations = pSampleLocations_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SampleLocationsInfoEXT & setSampleLocations(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const &
        sampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
      pSampleLocations     = sampleLocations_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSampleLocationsInfoEXT *>( this );
    }

    operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSampleLocationsInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SampleLocationsInfoEXT const & ) const = default;
#else
    bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) &&
             ( sampleLocationGridSize == rhs.sampleLocationGridSize ) &&
             ( sampleLocationsCount == rhs.sampleLocationsCount ) && ( pSampleLocations == rhs.pSampleLocations );
    }

    bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType                   = StructureType::eSampleLocationsInfoEXT;
    const void *                              pNext                   = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::Extent2D            sampleLocationGridSize  = {};
    uint32_t                                  sampleLocationsCount    = {};
    const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations  = {};
  };
  static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
  {
    using Type = SampleLocationsInfoEXT;
  };
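  // Illustrative usage, not part of the generated header: when enhanced mode is enabled (the default, i.e.
  // VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined), the ArrayProxy constructor above fills
  // sampleLocationsCount and pSampleLocations from a container in one step. The values are placeholders.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::SampleLocationEXT, 2> locations = {
  //     VULKAN_HPP_NAMESPACE::SampleLocationEXT( 0.25f, 0.25f ),
  //     VULKAN_HPP_NAMESPACE::SampleLocationEXT( 0.75f, 0.75f )
  //   };
  //   VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo(
  //     VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e2,
  //     VULKAN_HPP_NAMESPACE::Extent2D( 1, 1 ),
  //     locations );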

  struct AttachmentSampleLocationsEXT
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(
      uint32_t                                     attachmentIndex_     = {},
      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : attachmentIndex( attachmentIndex_ )
      , sampleLocationsInfo( sampleLocationsInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR
      AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT &
                            operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
      return *this;
    }

    AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentIndex = attachmentIndex_;
      return *this;
    }

    AttachmentSampleLocationsEXT & setSampleLocationsInfo(
      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsInfo = sampleLocationsInfo_;
      return *this;
    }

    operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkAttachmentSampleLocationsEXT *>( this );
    }

    operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkAttachmentSampleLocationsEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default;
#else
    bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
    }

    bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                     attachmentIndex     = {};
    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
  };
  static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<AttachmentSampleLocationsEXT>::value,
                 "struct wrapper is not a standard layout!" );

  struct BaseInStructure
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ =
                       VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT : sType( sType_ )
    {}

    BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
      : BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( &rhs );
      return *this;
    }

    BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBaseInStructure *>( this );
    }

    operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBaseInStructure *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BaseInStructure const & ) const = default;
#else
    bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
    }

    bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
    const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext = {};
  };
  static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BaseInStructure>::value, "struct wrapper is not a standard layout!" );

  struct BaseOutStructure
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ =
                        VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo ) VULKAN_HPP_NOEXCEPT : sType( sType_ )
    {}

    BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
      : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
      return *this;
    }

    BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBaseOutStructure *>( this );
    }

    operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBaseOutStructure *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BaseOutStructure const & ) const = default;
#else
    bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
    }

    bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType             sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
    struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext = {};
  };
  static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BaseOutStructure>::value, "struct wrapper is not a standard layout!" );

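  // DeviceMemory is a thin, non-owning wrapper around VkDeviceMemory: same size as the C handle (see the static_assert
  // below), default-constructed to a null handle, and convertible back to VkDeviceMemory.
  // Illustrative usage sketch, assuming enhanced mode with exceptions, a valid vk::Device `device` and a filled-in
  // MemoryAllocateInfo `allocInfo`:
  //
  //   VULKAN_HPP_NAMESPACE::DeviceMemory memory = device.allocateMemory( allocInfo );
  //   if ( memory )   // explicit operator bool: true for a non-null handle
  //   {
  //     device.freeMemory( memory );
  //   }
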
  class DeviceMemory
  {
  public:
    using CType = VkDeviceMemory;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;

  public:
    VULKAN_HPP_CONSTEXPR         DeviceMemory() = default;
    VULKAN_HPP_CONSTEXPR         DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
      : m_deviceMemory( deviceMemory )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
    {
      m_deviceMemory = deviceMemory;
      return *this;
    }
#endif

    DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_deviceMemory = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceMemory const & ) const = default;
#else
    bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_deviceMemory == rhs.m_deviceMemory;
    }

    bool operator!=( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_deviceMemory != rhs.m_deviceMemory;
    }

    bool operator<( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_deviceMemory < rhs.m_deviceMemory;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
    {
      return m_deviceMemory;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_deviceMemory != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_deviceMemory == VK_NULL_HANDLE;
    }

  private:
    VkDeviceMemory m_deviceMemory = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDeviceMemory>
  {
    using type = VULKAN_HPP_NAMESPACE::DeviceMemory;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory>
  {
    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory>
  {
    using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeviceMemory>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

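  // BindAccelerationStructureMemoryInfoNV (VK_NV_ray_tracing) describes one acceleration-structure-to-memory binding
  // for Device::bindAccelerationStructureMemoryNV. Illustrative sketch, assuming the extension is enabled and
  // `accelerationStructure` / `memory` are valid handles:
  //
  //   auto bindInfo = VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV()
  //                     .setAccelerationStructure( accelerationStructure )
  //                     .setMemory( memory )
  //                     .setMemoryOffset( 0 );
  //   device.bindAccelerationStructureMemoryNV( bindInfo );
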
  struct BindAccelerationStructureMemoryInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eBindAccelerationStructureMemoryInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {},
                                             VULKAN_HPP_NAMESPACE::DeviceMemory            memory_                = {},
                                             VULKAN_HPP_NAMESPACE::DeviceSize              memoryOffset_          = {},
                                             uint32_t                                      deviceIndexCount_      = {},
                                             const uint32_t * pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
      : accelerationStructure( accelerationStructure_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
      , deviceIndexCount( deviceIndexCount_ )
      , pDeviceIndices( pDeviceIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindAccelerationStructureMemoryInfoNV(
          *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindAccelerationStructureMemoryInfoNV(
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV                         accelerationStructure_,
      VULKAN_HPP_NAMESPACE::DeviceMemory                                    memory_,
      VULKAN_HPP_NAMESPACE::DeviceSize                                      memoryOffset_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
      : accelerationStructure( accelerationStructure_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
      , deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
      , pDeviceIndices( deviceIndices_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV &
                            operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindAccelerationStructureMemoryInfoNV &
      operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>( &rhs );
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setAccelerationStructure(
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV &
      setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = deviceIndexCount_;
      return *this;
    }

    BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceIndices = pDeviceIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindAccelerationStructureMemoryInfoNV & setDeviceIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
      pDeviceIndices   = deviceIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( this );
    }

    operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default;
#else
    bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( accelerationStructure == rhs.accelerationStructure ) && ( memory == rhs.memory ) &&
             ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) &&
             ( pDeviceIndices == rhs.pDeviceIndices );
    }

    bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType           sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
    const void *                                  pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory            memory                = {};
    VULKAN_HPP_NAMESPACE::DeviceSize              memoryOffset          = {};
    uint32_t                                      deviceIndexCount      = {};
    const uint32_t *                              pDeviceIndices        = {};
  };
  static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindAccelerationStructureMemoryInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBindAccelerationStructureMemoryInfoNV>
  {
    using Type = BindAccelerationStructureMemoryInfoNV;
  };

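  // BindBufferMemoryDeviceGroupInfo extends BindBufferMemoryInfo through its pNext chain and supplies one device index
  // per physical device in the device group. Illustrative sketch, assuming a two-GPU device group and valid `buffer` /
  // `memory` handles:
  //
  //   uint32_t deviceIndices[2] = { 0, 1 };
  //   VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo groupInfo;
  //   groupInfo.setDeviceIndices( deviceIndices );   // enhanced-mode setter defined below
  //   VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo bindInfo( buffer, memory, 0 );
  //   bindInfo.setPNext( &groupInfo );
  //   device.bindBufferMemory2( bindInfo );
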
  struct BindBufferMemoryDeviceGroupInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t         deviceIndexCount_ = {},
                                                          const uint32_t * pDeviceIndices_   = {} ) VULKAN_HPP_NOEXCEPT
      : deviceIndexCount( deviceIndexCount_ )
      , pDeviceIndices( pDeviceIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR
      BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindBufferMemoryDeviceGroupInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
      : deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo &
                            operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>( &rhs );
      return *this;
    }

    BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = deviceIndexCount_;
      return *this;
    }

    BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceIndices = pDeviceIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindBufferMemoryDeviceGroupInfo & setDeviceIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
      pDeviceIndices   = deviceIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo *>( this );
    }

    operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default;
#else
    bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) &&
             ( pDeviceIndices == rhs.pDeviceIndices );
    }

    bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::eBindBufferMemoryDeviceGroupInfo;
    const void *                        pNext            = {};
    uint32_t                            deviceIndexCount = {};
    const uint32_t *                    pDeviceIndices   = {};
  };
  static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindBufferMemoryDeviceGroupInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
  {
    using Type = BindBufferMemoryDeviceGroupInfo;
  };
  using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;

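  // BindBufferMemoryInfo is the Vulkan 1.1 / VK_KHR_bind_memory2 form of the vkBindBufferMemory parameters: it can be
  // extended through pNext and submitted in batches. Illustrative sketch, assuming enhanced mode and valid `buffer` /
  // `memory` handles:
  //
  //   device.bindBufferMemory2( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo( buffer, memory, 0 ) );
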
  struct BindBufferMemoryInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer       buffer_     = {},
                                               VULKAN_HPP_NAMESPACE::DeviceMemory memory_     = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindBufferMemoryInfo( *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo &
                            operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>( &rhs );
      return *this;
    }

    BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindBufferMemoryInfo *>( this );
    }

    operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindBufferMemoryInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindBufferMemoryInfo const & ) const = default;
#else
    bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) &&
             ( memoryOffset == rhs.memoryOffset );
    }

    bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eBindBufferMemoryInfo;
    const void *                        pNext        = {};
    VULKAN_HPP_NAMESPACE::Buffer        buffer       = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory  memory       = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    memoryOffset = {};
  };
  static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
  {
    using Type = BindBufferMemoryInfo;
  };
  using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;

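  // Offset2D is the plain { x, y } pair used by Rect2D, image copies and similar structures; it has no sType/pNext.
  // Illustrative sketch of the constructor and the fluent setters defined below:
  //
  //   VULKAN_HPP_NAMESPACE::Offset2D origin( 0, 0 );
  //   origin.setX( 16 ).setY( 32 );
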
  struct Offset2D
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
    {}

    VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast<Offset2D const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>( &rhs );
      return *this;
    }

    Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOffset2D *>( this );
    }

    operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOffset2D *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Offset2D const & ) const = default;
#else
    bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( x == rhs.x ) && ( y == rhs.y );
    }

    bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t x = {};
    int32_t y = {};
  };
  static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Offset2D>::value, "struct wrapper is not a standard layout!" );

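  // Rect2D combines an Offset2D origin with an Extent2D size and is used for scissors, render areas and
  // split-instance bind regions. Illustrative sketch, assuming enhanced mode and a recording vk::CommandBuffer
  // `commandBuffer`:
  //
  //   VULKAN_HPP_NAMESPACE::Rect2D scissor( VULKAN_HPP_NAMESPACE::Offset2D( 0, 0 ),
  //                                         VULKAN_HPP_NAMESPACE::Extent2D( 1280, 720 ) );
  //   commandBuffer.setScissor( 0, scissor );
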
  struct Rect2D
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
                                 VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT
      : offset( offset_ )
      , extent( extent_ )
    {}

    VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast<Rect2D const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
      return *this;
    }

    Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRect2D *>( this );
    }

    operator VkRect2D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRect2D *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Rect2D const & ) const = default;
#else
    bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( offset == rhs.offset ) && ( extent == rhs.extent );
    }

    bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
  };
  static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Rect2D>::value, "struct wrapper is not a standard layout!" );

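  // BindImageMemoryDeviceGroupInfo is the image-side counterpart of BindBufferMemoryDeviceGroupInfo above: chained
  // into BindImageMemoryInfo::pNext, it supplies per-device indices and, for split-instance frame resources, one
  // Rect2D bind region per physical device in the group.
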
  struct BindImageMemoryDeviceGroupInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(
      uint32_t                             deviceIndexCount_             = {},
      const uint32_t *                     pDeviceIndices_               = {},
      uint32_t                             splitInstanceBindRegionCount_ = {},
      const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_    = {} ) VULKAN_HPP_NOEXCEPT
      : deviceIndexCount( deviceIndexCount_ )
      , pDeviceIndices( pDeviceIndices_ )
      , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ )
      , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
    {}

    VULKAN_HPP_CONSTEXPR
      BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindImageMemoryDeviceGroupInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const &
        splitInstanceBindRegions_ = {} )
      : deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) )
      , pDeviceIndices( deviceIndices_.data() )
      , splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) )
      , pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo &
                            operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>( &rhs );
      return *this;
    }

    BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = deviceIndexCount_;
      return *this;
    }

    BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceIndices = pDeviceIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindImageMemoryDeviceGroupInfo & setDeviceIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
      pDeviceIndices   = deviceIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindImageMemoryDeviceGroupInfo &
      setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
      return *this;
    }

    BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions(
      const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const &
        splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
      pSplitInstanceBindRegions    = splitInstanceBindRegions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo *>( this );
    }

    operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default;
#else
    bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) &&
             ( pDeviceIndices == rhs.pDeviceIndices ) &&
             ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) &&
             ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
    }

    bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType  sType                        = StructureType::eBindImageMemoryDeviceGroupInfo;
    const void *                         pNext                        = {};
    uint32_t                             deviceIndexCount             = {};
    const uint32_t *                     pDeviceIndices               = {};
    uint32_t                             splitInstanceBindRegionCount = {};
    const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions    = {};
  };
  static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindImageMemoryDeviceGroupInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
  {
    using Type = BindImageMemoryDeviceGroupInfo;
  };
  using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;

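  // Image is the non-owning wrapper around VkImage, following the same pattern as DeviceMemory above.
  // Illustrative sketch, assuming enhanced mode, VK_KHR_swapchain and a valid `swapchain` handle:
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );
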
  class Image
  {
  public:
    using CType = VkImage;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eImage;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;

  public:
    VULKAN_HPP_CONSTEXPR         Image() = default;
    VULKAN_HPP_CONSTEXPR         Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT : m_image( image ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Image & operator=( VkImage image ) VULKAN_HPP_NOEXCEPT
    {
      m_image = image;
      return *this;
    }
#endif

    Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_image = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Image const & ) const = default;
#else
    bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_image == rhs.m_image;
    }

    bool operator!=( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_image != rhs.m_image;
    }

    bool operator<( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_image < rhs.m_image;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT
    {
      return m_image;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_image != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_image == VK_NULL_HANDLE;
    }

  private:
    VkImage m_image = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eImage>
  {
    using type = VULKAN_HPP_NAMESPACE::Image;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImage>
  {
    using Type = VULKAN_HPP_NAMESPACE::Image;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage>
  {
    using Type = VULKAN_HPP_NAMESPACE::Image;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Image>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

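  // BindImageMemoryInfo mirrors BindBufferMemoryInfo for images and is consumed by Device::bindImageMemory2; its pNext
  // chain accepts the device-group, swapchain and plane structures defined nearby. Illustrative sketch, assuming
  // enhanced mode and valid `image` / `memory` handles:
  //
  //   device.bindImageMemory2( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo( image, memory, 0 ) );
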
  struct BindImageMemoryInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image        image_      = {},
                                              VULKAN_HPP_NAMESPACE::DeviceMemory memory_     = {},
                                              VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo &
                            operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>( &rhs );
      return *this;
    }

    BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImageMemoryInfo *>( this );
    }

    operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImageMemoryInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindImageMemoryInfo const & ) const = default;
#else
    bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) &&
             ( memoryOffset == rhs.memoryOffset );
    }

    bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eBindImageMemoryInfo;
    const void *                        pNext        = {};
    VULKAN_HPP_NAMESPACE::Image         image        = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory  memory       = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    memoryOffset = {};
  };
  static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
  {
    using Type = BindImageMemoryInfo;
  };
  using BindImageMemoryInfoKHR = BindImageMemoryInfo;

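  // BindImageMemorySwapchainInfoKHR binds an image to the memory backing a swapchain image instead of to an explicit
  // DeviceMemory allocation; when it is chained in, BindImageMemoryInfo::memory is expected to stay a null handle.
  // Illustrative sketch, assuming VK_KHR_swapchain, a valid `swapchain` and a suitable `imageIndex`:
  //
  //   VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR swapchainInfo( swapchain, imageIndex );
  //   VULKAN_HPP_NAMESPACE::BindImageMemoryInfo bindInfo;   // memory left as a null handle
  //   bindInfo.setImage( image ).setPNext( &swapchainInfo );
  //   device.bindImageMemory2( bindInfo );
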
  struct BindImageMemorySwapchainInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
                                                          uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : swapchain( swapchain_ )
      , imageIndex( imageIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindImageMemorySwapchainInfoKHR( *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR &
                            operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>( &rhs );
      return *this;
    }

    BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }

    BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      imageIndex = imageIndex_;
      return *this;
    }

    operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR *>( this );
    }

    operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default;
#else
    bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) &&
             ( imageIndex == rhs.imageIndex );
    }

    bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::eBindImageMemorySwapchainInfoKHR;
    const void *                        pNext      = {};
    VULKAN_HPP_NAMESPACE::SwapchainKHR  swapchain  = {};
    uint32_t                            imageIndex = {};
  };
  static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindImageMemorySwapchainInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
  {
    using Type = BindImageMemorySwapchainInfoKHR;
  };

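  // BindImagePlaneMemoryInfo selects a single plane of a disjoint multi-planar image; one BindImageMemoryInfo with
  // this structure chained in is submitted per plane. Illustrative sketch, assuming a disjoint multi-planar `image`
  // and a separate allocation `plane0Memory`:
  //
  //   VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo planeInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::ePlane0 );
  //   VULKAN_HPP_NAMESPACE::BindImageMemoryInfo bindInfo( image, plane0Memory, 0 );
  //   bindInfo.setPNext( &planeInfo );
  //   device.bindImageMemory2( bindInfo );
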
  struct BindImagePlaneMemoryInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ =
                                  VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
      : planeAspect( planeAspect_ )
    {}

    VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindImagePlaneMemoryInfo( *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo &
                            operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>( &rhs );
      return *this;
    }

    BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindImagePlaneMemoryInfo &
      setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
    {
      planeAspect = planeAspect_;
      return *this;
    }

    operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindImagePlaneMemoryInfo *>( this );
    }

    operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindImagePlaneMemoryInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default;
#else
    bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
    }

    bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType       = StructureType::eBindImagePlaneMemoryInfo;
    const void *                              pNext       = {};
    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
  };
  static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
  {
    using Type = BindImagePlaneMemoryInfo;
  };
  using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;

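  // BindIndexBufferIndirectCommandNV and BindShaderGroupIndirectCommandNV below are plain data tokens for
  // VK_NV_device_generated_commands: they are written into a command stream buffer (on the host or by a shader)
  // rather than passed to an API entry point, and the device reads them when the generated commands are executed.
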
  struct BindIndexBufferIndirectCommandNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(
      VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {},
      uint32_t                            size_          = {},
      VULKAN_HPP_NAMESPACE::IndexType     indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT
      : bufferAddress( bufferAddress_ )
      , size( size_ )
      , indexType( indexType_ )
    {}

    VULKAN_HPP_CONSTEXPR
      BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindIndexBufferIndirectCommandNV( *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV &
                            operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
      return *this;
    }

    BindIndexBufferIndirectCommandNV &
      setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferAddress = bufferAddress_;
      return *this;
    }

    BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
    {
      indexType = indexType_;
      return *this;
    }

    operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV *>( this );
    }

    operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindIndexBufferIndirectCommandNV const & ) const = default;
#else
    bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType );
    }

    bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
    uint32_t                            size          = {};
    VULKAN_HPP_NAMESPACE::IndexType     indexType     = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
  };
  static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindIndexBufferIndirectCommandNV>::value,
                 "struct wrapper is not a standard layout!" );

  struct BindShaderGroupIndirectCommandNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : groupIndex( groupIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV &
                            operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
      return *this;
    }

    BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      groupIndex = groupIndex_;
      return *this;
    }

    operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV *>( this );
    }

    operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default;
#else
    bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( groupIndex == rhs.groupIndex );
    }

    bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t groupIndex = {};
  };
  static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindShaderGroupIndirectCommandNV>::value,
                 "struct wrapper is not a standard layout!" );

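  // SparseMemoryBind describes one range-to-memory binding for sparse resources (a null memory handle unbinds the
  // range); it is referenced by the Sparse*MemoryBindInfo structures below and ultimately consumed by
  // Queue::bindSparse.
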
  struct SparseMemoryBind
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize            resourceOffset_ = {},
                                           VULKAN_HPP_NAMESPACE::DeviceSize            size_           = {},
                                           VULKAN_HPP_NAMESPACE::DeviceMemory          memory_         = {},
                                           VULKAN_HPP_NAMESPACE::DeviceSize            memoryOffset_   = {},
                                           VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : resourceOffset( resourceOffset_ )
      , size( size_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseMemoryBind( *reinterpret_cast<SparseMemoryBind const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>( &rhs );
      return *this;
    }

    SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      resourceOffset = resourceOffset_;
      return *this;
    }

    SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseMemoryBind *>( this );
    }

    operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseMemoryBind *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseMemoryBind const & ) const = default;
#else
    bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) &&
             ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags );
    }

    bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize            resourceOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize            size           = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory          memory         = {};
    VULKAN_HPP_NAMESPACE::DeviceSize            memoryOffset   = {};
    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags          = {};
  };
  static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );

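  // SparseBufferMemoryBindInfo groups the SparseMemoryBind ranges applied to one sparse buffer. Illustrative sketch,
  // assuming enhanced mode, a sparse-binding queue `sparseQueue`, a sparse `buffer`, its `memoryRequirements`, a
  // sufficiently large allocation `memory` and a vk::Fence `fence` to signal:
  //
  //   VULKAN_HPP_NAMESPACE::SparseMemoryBind bind( 0, memoryRequirements.size, memory, 0 );
  //   VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo bufferBind( buffer, 1, &bind );
  //   VULKAN_HPP_NAMESPACE::BindSparseInfo bindSparseInfo;
  //   bindSparseInfo.setBufferBinds( bufferBind );
  //   sparseQueue.bindSparse( bindSparseInfo, fence );
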
  struct SparseBufferMemoryBindInfo
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer                   buffer_    = {},
                                  uint32_t                                       bindCount_ = {},
                                  const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_    = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
      , bindCount( bindCount_ )
      , pBinds( pBinds_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseBufferMemoryBindInfo( *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SparseBufferMemoryBindInfo(
      VULKAN_HPP_NAMESPACE::Buffer                                                                        buffer_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
      : buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo &
                            operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>( &rhs );
      return *this;
    }

    SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = bindCount_;
      return *this;
    }

    SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBinds = pBinds_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SparseBufferMemoryBindInfo & setBinds(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
      VULKAN_HPP_NOEXCEPT
    {
      bindCount = static_cast<uint32_t>( binds_.size() );
      pBinds    = binds_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseBufferMemoryBindInfo *>( this );
    }

    operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseBufferMemoryBindInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default;
#else
    bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
    }

    bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Buffer                   buffer    = {};
    uint32_t                                       bindCount = {};
    const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds    = {};
  };
  static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseBufferMemoryBindInfo>::value,
                 "struct wrapper is not a standard layout!" );

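  // SparseImageOpaqueMemoryBindInfo is the image analogue of SparseBufferMemoryBindInfo above; it is typically used
  // for opaque regions such as a sparse image's mip tail, where the binding is expressed in byte ranges rather than
  // image coordinates.
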
  struct SparseImageOpaqueMemoryBindInfo
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image                    image_     = {},
                                       uint32_t                                       bindCount_ = {},
                                       const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
      , bindCount( bindCount_ )
      , pBinds( pBinds_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast<SparseImageOpaqueMemoryBindInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SparseImageOpaqueMemoryBindInfo(
      VULKAN_HPP_NAMESPACE::Image                                                                         image_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
      : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo &
                            operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>( &rhs );
      return *this;
    }

    SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = bindCount_;
      return *this;
    }

    SparseImageOpaqueMemoryBindInfo &
      setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBinds = pBinds_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SparseImageOpaqueMemoryBindInfo & setBinds(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
      VULKAN_HPP_NOEXCEPT
    {
      bindCount = static_cast<uint32_t>( binds_.size() );
      pBinds    = binds_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo *>( this );
    }

    operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default;
#else
    bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
    }

    bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Image                    image     = {};
    uint32_t                                       bindCount = {};
    const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds    = {};
  };
  static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseImageOpaqueMemoryBindInfo>::value,
                 "struct wrapper is not a standard layout!" );

  struct ImageSubresource
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
                                           uint32_t                               mipLevel_   = {},
                                           uint32_t                               arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask( aspectMask_ )
      , mipLevel( mipLevel_ )
      , arrayLayer( arrayLayer_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
      return *this;
    }

    ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLevel = mipLevel_;
      return *this;
    }

    ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayLayer = arrayLayer_;
      return *this;
    }

    operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSubresource *>( this );
    }

    operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSubresource *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageSubresource const & ) const = default;
#else
    bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer );
    }

    bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
    uint32_t                               mipLevel   = {};
    uint32_t                               arrayLayer = {};
  };
  static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageSubresource>::value, "struct wrapper is not a standard layout!" );

  struct Offset3D
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
      , z( z_ )
    {}

    VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) ) {}

    explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
      return *this;
    }

    Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
    {
      z = z_;
      return *this;
    }

    operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkOffset3D *>( this );
    }

    operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkOffset3D *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Offset3D const & ) const = default;
#else
    bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
    }

    bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    int32_t x = {};
    int32_t y = {};
    int32_t z = {};
  };
  static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Offset3D>::value, "struct wrapper is not a standard layout!" );

  struct Extent3D
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT
      : width( width_ )
      , height( height_ )
      , depth( depth_ )
    {}

    VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) ) {}

    explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} )
      : width( extent2D.width ), height( extent2D.height ), depth( depth_ )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
      return *this;
    }

    Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
    {
      depth = depth_;
      return *this;
    }

    operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExtent3D *>( this );
    }

    operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExtent3D *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Extent3D const & ) const = default;
#else
    bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
    }

    bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t width  = {};
    uint32_t height = {};
    uint32_t depth  = {};
  };
  static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Extent3D>::value, "struct wrapper is not a standard layout!" );

  struct SparseImageMemoryBind
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource      subresource_  = {},
                             VULKAN_HPP_NAMESPACE::Offset3D              offset_       = {},
                             VULKAN_HPP_NAMESPACE::Extent3D              extent_       = {},
                             VULKAN_HPP_NAMESPACE::DeviceMemory          memory_       = {},
                             VULKAN_HPP_NAMESPACE::DeviceSize            memoryOffset_ = {},
                             VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_        = {} ) VULKAN_HPP_NOEXCEPT
      : subresource( subresource_ )
      , offset( offset_ )
      , extent( extent_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageMemoryBind( *reinterpret_cast<SparseImageMemoryBind const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind &
                            operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>( &rhs );
      return *this;
    }

    SparseImageMemoryBind &
      setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
    {
      subresource = subresource_;
      return *this;
    }

    SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryBind *>( this );
    }

    operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryBind *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageMemoryBind const & ) const = default;
#else
    bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) &&
             ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags );
    }

    bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageSubresource      subresource  = {};
    VULKAN_HPP_NAMESPACE::Offset3D              offset       = {};
    VULKAN_HPP_NAMESPACE::Extent3D              extent       = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory          memory       = {};
    VULKAN_HPP_NAMESPACE::DeviceSize            memoryOffset = {};
    VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags        = {};
  };
  static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );

  struct SparseImageMemoryBindInfo
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image                         image_     = {},
                                 uint32_t                                            bindCount_ = {},
                                 const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
      , bindCount( bindCount_ )
      , pBinds( pBinds_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageMemoryBindInfo( *reinterpret_cast<SparseImageMemoryBindInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SparseImageMemoryBindInfo(
      VULKAN_HPP_NAMESPACE::Image                                                                              image_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
      : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo &
                            operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>( &rhs );
      return *this;
    }

    SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindCount = bindCount_;
      return *this;
    }

    SparseImageMemoryBindInfo &
      setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBinds = pBinds_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SparseImageMemoryBindInfo & setBinds(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
      VULKAN_HPP_NOEXCEPT
    {
      bindCount = static_cast<uint32_t>( binds_.size() );
      pBinds    = binds_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryBindInfo *>( this );
    }

    operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryBindInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageMemoryBindInfo const & ) const = default;
#else
    bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds );
    }

    bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Image                         image     = {};
    uint32_t                                            bindCount = {};
    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds    = {};
  };
  static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseImageMemoryBindInfo>::value,
                 "struct wrapper is not a standard layout!" );

  struct BindSparseInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      BindSparseInfo( uint32_t                                                      waitSemaphoreCount_   = {},
                      const VULKAN_HPP_NAMESPACE::Semaphore *                       pWaitSemaphores_      = {},
                      uint32_t                                                      bufferBindCount_      = {},
                      const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo *      pBufferBinds_         = {},
                      uint32_t                                                      imageOpaqueBindCount_ = {},
                      const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_    = {},
                      uint32_t                                                      imageBindCount_       = {},
                      const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo *       pImageBinds_          = {},
                      uint32_t                                                      signalSemaphoreCount_ = {},
                      const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT
      : waitSemaphoreCount( waitSemaphoreCount_ )
      , pWaitSemaphores( pWaitSemaphores_ )
      , bufferBindCount( bufferBindCount_ )
      , pBufferBinds( pBufferBinds_ )
      , imageOpaqueBindCount( imageOpaqueBindCount_ )
      , pImageOpaqueBinds( pImageOpaqueBinds_ )
      , imageBindCount( imageBindCount_ )
      , pImageBinds( pImageBinds_ )
      , signalSemaphoreCount( signalSemaphoreCount_ )
      , pSignalSemaphores( pSignalSemaphores_ )
    {}

    VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindSparseInfo( *reinterpret_cast<BindSparseInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const &
        bufferBinds_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const &
        imageOpaqueBinds_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const &
        imageBinds_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const &
        signalSemaphores_ = {} )
      : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
      , pWaitSemaphores( waitSemaphores_.data() )
      , bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) )
      , pBufferBinds( bufferBinds_.data() )
      , imageOpaqueBindCount( static_cast<uint32_t>( imageOpaqueBinds_.size() ) )
      , pImageOpaqueBinds( imageOpaqueBinds_.data() )
      , imageBindCount( static_cast<uint32_t>( imageBinds_.size() ) )
      , pImageBinds( imageBinds_.data() )
      , signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) )
      , pSignalSemaphores( signalSemaphores_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>( &rhs );
      return *this;
    }

    BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphores = pWaitSemaphores_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setWaitSemaphores(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ )
      VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
      pWaitSemaphores    = waitSemaphores_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferBindCount = bufferBindCount_;
      return *this;
    }

    BindSparseInfo &
      setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo * pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pBufferBinds = pBufferBinds_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setBufferBinds(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const &
        bufferBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
      pBufferBinds    = bufferBinds_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOpaqueBindCount = imageOpaqueBindCount_;
      return *this;
    }

    BindSparseInfo & setPImageOpaqueBinds(
      const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageOpaqueBinds = pImageOpaqueBinds_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setImageOpaqueBinds(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const &
        imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
      pImageOpaqueBinds    = imageOpaqueBinds_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageBindCount = imageBindCount_;
      return *this;
    }

    BindSparseInfo &
      setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo * pImageBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageBinds = pImageBinds_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setImageBinds(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const &
        imageBinds_ ) VULKAN_HPP_NOEXCEPT
    {
      imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
      pImageBinds    = imageBinds_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = signalSemaphoreCount_;
      return *this;
    }

    BindSparseInfo &
      setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphores = pSignalSemaphores_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BindSparseInfo & setSignalSemaphores(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ )
      VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
      pSignalSemaphores    = signalSemaphores_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindSparseInfo *>( this );
    }

    operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindSparseInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindSparseInfo const & ) const = default;
#else
    bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
             ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) &&
             ( pBufferBinds == rhs.pBufferBinds ) && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) &&
             ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) && ( imageBindCount == rhs.imageBindCount ) &&
             ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
             ( pSignalSemaphores == rhs.pSignalSemaphores );
    }

    bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                           sType                = StructureType::eBindSparseInfo;
    const void *                                                  pNext                = {};
    uint32_t                                                      waitSemaphoreCount   = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *                       pWaitSemaphores      = {};
    uint32_t                                                      bufferBindCount      = {};
    const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo *      pBufferBinds         = {};
    uint32_t                                                      imageOpaqueBindCount = {};
    const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds    = {};
    uint32_t                                                      imageBindCount       = {};
    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo *       pImageBinds          = {};
    uint32_t                                                      signalSemaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *                       pSignalSemaphores    = {};
  };
  static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindSparseInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBindSparseInfo>
  {
    using Type = BindSparseInfo;
  };
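
  // Illustrative sketch (not generated content) of putting the sparse-binding structs above
  // together. It assumes enhanced mode with exceptions enabled, and that `buffer`, `memory`,
  // `sparseQueue`, and `fence` are valid handles created elsewhere on a queue with sparse
  // binding support:
  //
  //   VULKAN_HPP_NAMESPACE::SparseMemoryBind memoryBind = VULKAN_HPP_NAMESPACE::SparseMemoryBind()
  //                                                         .setResourceOffset( 0 )
  //                                                         .setSize( 64 * 1024 )
  //                                                         .setMemory( memory )
  //                                                         .setMemoryOffset( 0 );
  //   VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo bufferBind( buffer, 1, &memoryBind );
  //   VULKAN_HPP_NAMESPACE::BindSparseInfo             bindInfo =
  //     VULKAN_HPP_NAMESPACE::BindSparseInfo().setBufferBinds( bufferBind );
  //   sparseQueue.bindSparse( bindInfo, fence );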

  struct BindVertexBufferIndirectCommandNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {},
                                                            uint32_t                            size_          = {},
                                                            uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferAddress( bufferAddress_ )
      , size( size_ )
      , stride( stride_ )
    {}

    VULKAN_HPP_CONSTEXPR
      BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : BindVertexBufferIndirectCommandNV( *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV &
                            operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const *>( &rhs );
      return *this;
    }

    BindVertexBufferIndirectCommandNV &
      setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferAddress = bufferAddress_;
      return *this;
    }

    BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV *>( this );
    }

    operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default;
#else
    bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride );
    }

    bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
    uint32_t                            size          = {};
    uint32_t                            stride        = {};
  };
  static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BindVertexBufferIndirectCommandNV>::value,
                 "struct wrapper is not a standard layout!" );

  struct ImageSubresourceLayers
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_     = {},
                                                 uint32_t                               mipLevel_       = {},
                                                 uint32_t                               baseArrayLayer_ = {},
                                                 uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask( aspectMask_ )
      , mipLevel( mipLevel_ )
      , baseArrayLayer( baseArrayLayer_ )
      , layerCount( layerCount_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers &
                            operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
      return *this;
    }

    ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLevel = mipLevel_;
      return *this;
    }

    ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      baseArrayLayer = baseArrayLayer_;
      return *this;
    }

    ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }

    operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSubresourceLayers *>( this );
    }

    operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSubresourceLayers *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageSubresourceLayers const & ) const = default;
#else
    bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) &&
             ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
    }

    bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask     = {};
    uint32_t                               mipLevel       = {};
    uint32_t                               baseArrayLayer = {};
    uint32_t                               layerCount     = {};
  };
  static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );

  struct ImageBlit2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      ImageBlit2KHR( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers          srcSubresource_ = {},
                     std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_     = {},
                     VULKAN_HPP_NAMESPACE::ImageSubresourceLayers          dstSubresource_ = {},
                     std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_     = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource( srcSubresource_ )
      , srcOffsets( srcOffsets_ )
      , dstSubresource( dstSubresource_ )
      , dstOffsets( dstOffsets_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageBlit2KHR( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageBlit2KHR( *reinterpret_cast<ImageBlit2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR & operator=( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageBlit2KHR & operator=( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit2KHR const *>( &rhs );
      return *this;
    }

    ImageBlit2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageBlit2KHR &
      setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    ImageBlit2KHR &
      setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffsets = srcOffsets_;
      return *this;
    }

    ImageBlit2KHR &
      setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    ImageBlit2KHR &
      setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffsets = dstOffsets_;
      return *this;
    }

    operator VkImageBlit2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageBlit2KHR *>( this );
    }

    operator VkImageBlit2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageBlit2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageBlit2KHR const & ) const = default;
#else
    bool operator==( ImageBlit2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) &&
             ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) &&
             ( dstOffsets == rhs.dstOffsets );
    }

    bool operator!=( ImageBlit2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                                     sType = StructureType::eImageBlit2KHR;
    const void *                                                            pNext = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers                            srcSubresource = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets     = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers                            dstSubresource = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets     = {};
  };
  static_assert( sizeof( ImageBlit2KHR ) == sizeof( VkImageBlit2KHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageBlit2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageBlit2KHR>
  {
    using Type = ImageBlit2KHR;
  };

  struct BlitImageInfo2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR(
      VULKAN_HPP_NAMESPACE::Image                 srcImage_       = {},
      VULKAN_HPP_NAMESPACE::ImageLayout           srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      VULKAN_HPP_NAMESPACE::Image                 dstImage_       = {},
      VULKAN_HPP_NAMESPACE::ImageLayout           dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      uint32_t                                    regionCount_    = {},
      const VULKAN_HPP_NAMESPACE::ImageBlit2KHR * pRegions_       = {},
      VULKAN_HPP_NAMESPACE::Filter                filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest ) VULKAN_HPP_NOEXCEPT
      : srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( regionCount_ )
      , pRegions( pRegions_ )
      , filter( filter_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BlitImageInfo2KHR( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : BlitImageInfo2KHR( *reinterpret_cast<BlitImageInfo2KHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BlitImageInfo2KHR(
      VULKAN_HPP_NAMESPACE::Image                                                                      srcImage_,
      VULKAN_HPP_NAMESPACE::ImageLayout                                                                srcImageLayout_,
      VULKAN_HPP_NAMESPACE::Image                                                                      dstImage_,
      VULKAN_HPP_NAMESPACE::ImageLayout                                                                dstImageLayout_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_,
      VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest )
      : srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
      , filter( filter_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR &
                            operator=( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BlitImageInfo2KHR & operator=( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR const *>( &rhs );
      return *this;
    }

    BlitImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BlitImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    BlitImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    BlitImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    BlitImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    BlitImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    BlitImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BlitImageInfo2KHR & setRegions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_ )
      VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    BlitImageInfo2KHR & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT
    {
      filter = filter_;
      return *this;
    }

    operator VkBlitImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBlitImageInfo2KHR *>( this );
    }

    operator VkBlitImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBlitImageInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BlitImageInfo2KHR const & ) const = default;
#else
    bool operator==( BlitImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
             ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) &&
             ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) &&
             ( pRegions == rhs.pRegions ) && ( filter == rhs.filter );
    }

    bool operator!=( BlitImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType          = StructureType::eBlitImageInfo2KHR;
    const void *                                pNext          = {};
    VULKAN_HPP_NAMESPACE::Image                 srcImage       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout           srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::Image                 dstImage       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout           dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    uint32_t                                    regionCount    = {};
    const VULKAN_HPP_NAMESPACE::ImageBlit2KHR * pRegions       = {};
    VULKAN_HPP_NAMESPACE::Filter                filter         = VULKAN_HPP_NAMESPACE::Filter::eNearest;
  };
  static_assert( sizeof( BlitImageInfo2KHR ) == sizeof( VkBlitImageInfo2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BlitImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBlitImageInfo2KHR>
  {
    using Type = BlitImageInfo2KHR;
  };
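
  // Illustrative sketch (not generated content): filling an ImageBlit2KHR region and recording
  // it through BlitImageInfo2KHR. It assumes enhanced mode, that VK_KHR_copy_commands2 is
  // enabled and its entry points are resolvable through the dispatcher in use, that `cmd` is a
  // CommandBuffer in the recording state, and that `srcImage` / `dstImage` are color images
  // already in the given layouts:
  //
  //   VULKAN_HPP_NAMESPACE::ImageSubresourceLayers layers(
  //     VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 );
  //   std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = { VULKAN_HPP_NAMESPACE::Offset3D{ 0, 0, 0 },
  //                                                                VULKAN_HPP_NAMESPACE::Offset3D{ 256, 256, 1 } };
  //   std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = { VULKAN_HPP_NAMESPACE::Offset3D{ 0, 0, 0 },
  //                                                                VULKAN_HPP_NAMESPACE::Offset3D{ 128, 128, 1 } };
  //   VULKAN_HPP_NAMESPACE::ImageBlit2KHR region = VULKAN_HPP_NAMESPACE::ImageBlit2KHR()
  //                                                  .setSrcSubresource( layers )
  //                                                  .setSrcOffsets( srcOffsets )
  //                                                  .setDstSubresource( layers )
  //                                                  .setDstOffsets( dstOffsets );
  //   VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR blitInfo( srcImage,
  //                                                     VULKAN_HPP_NAMESPACE::ImageLayout::eTransferSrcOptimal,
  //                                                     dstImage,
  //                                                     VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal,
  //                                                     region,
  //                                                     VULKAN_HPP_NAMESPACE::Filter::eLinear );
  //   cmd.blitImage2KHR( blitInfo );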

  struct BufferCopy
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize size_      = {} ) VULKAN_HPP_NOEXCEPT
      : srcOffset( srcOffset_ )
      , dstOffset( dstOffset_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
      return *this;
    }

    BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCopy *>( this );
    }

    operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCopy *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCopy const & ) const = default;
#else
    bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
    }

    bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size      = {};
  };
  static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferCopy>::value, "struct wrapper is not a standard layout!" );

  struct BufferCopy2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferCopy2KHR( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
                                         VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
                                         VULKAN_HPP_NAMESPACE::DeviceSize size_      = {} ) VULKAN_HPP_NOEXCEPT
      : srcOffset( srcOffset_ )
      , dstOffset( dstOffset_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferCopy2KHR( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCopy2KHR( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferCopy2KHR( *reinterpret_cast<BufferCopy2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferCopy2KHR & operator=( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCopy2KHR & operator=( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2KHR const *>( &rhs );
      return *this;
    }

    BufferCopy2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    BufferCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    BufferCopy2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkBufferCopy2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCopy2KHR *>( this );
    }

    operator VkBufferCopy2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCopy2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCopy2KHR const & ) const = default;
#else
    bool operator==( BufferCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) &&
             ( dstOffset == rhs.dstOffset ) && ( size == rhs.size );
    }

    bool operator!=( BufferCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eBufferCopy2KHR;
    const void *                        pNext     = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    srcOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    dstOffset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    size      = {};
  };
  static_assert( sizeof( BufferCopy2KHR ) == sizeof( VkBufferCopy2KHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferCopy2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferCopy2KHR>
  {
    using Type = BufferCopy2KHR;
  };
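
  // BufferCopy2KHR is the extensible ( pNext-capable ) counterpart of BufferCopy and is consumed
  // through the VK_KHR_copy_commands2 entry points rather than passed to a command directly. A
  // minimal sketch (not generated content), assuming the CopyBufferInfo2KHR wrapper defined
  // elsewhere in this header and a command buffer `cmd` whose dispatcher can resolve the
  // extension's functions:
  //
  //   VULKAN_HPP_NAMESPACE::BufferCopy2KHR     region( 0 /*srcOffset*/, 0 /*dstOffset*/, size );
  //   VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR copyInfo( srcBuffer, dstBuffer, region );
  //   cmd.copyBuffer2KHR( copyInfo );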

  struct BufferCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
                        VULKAN_HPP_NAMESPACE::DeviceSize        size_  = {},
                        VULKAN_HPP_NAMESPACE::BufferUsageFlags  usage_ = {},
                        VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
                        uint32_t                          queueFamilyIndexCount_ = {},
                        const uint32_t *                  pQueueFamilyIndices_   = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , size( size_ )
      , usage( usage_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( queueFamilyIndexCount_ )
      , pQueueFamilyIndices( pQueueFamilyIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags                               flags_,
                      VULKAN_HPP_NAMESPACE::DeviceSize                                      size_,
                      VULKAN_HPP_NAMESPACE::BufferUsageFlags                                usage_,
                      VULKAN_HPP_NAMESPACE::SharingMode                                     sharingMode_,
                      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
      : flags( flags_ )
      , size( size_ )
      , usage( usage_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
      , pQueueFamilyIndices( queueFamilyIndices_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>( &rhs );
      return *this;
    }

    BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    BufferCreateInfo & setQueueFamilyIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices   = queueFamilyIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferCreateInfo *>( this );
    }

    operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferCreateInfo const & ) const = default;
#else
    bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) &&
             ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) &&
             ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
             ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
    }

    bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType     sType                 = StructureType::eBufferCreateInfo;
    const void *                            pNext                 = {};
    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags                 = {};
    VULKAN_HPP_NAMESPACE::DeviceSize        size                  = {};
    VULKAN_HPP_NAMESPACE::BufferUsageFlags  usage                 = {};
    VULKAN_HPP_NAMESPACE::SharingMode       sharingMode           = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
    uint32_t                                queueFamilyIndexCount = {};
    const uint32_t *                        pQueueFamilyIndices   = {};
  };
  static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferCreateInfo>
  {
    using Type = BufferCreateInfo;
  };
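
  // Usage sketch (illustrative comment only, not generated from the registry; assumes a valid
  // VULKAN_HPP_NAMESPACE::Device named `device` and default exception-based error handling):
  //   auto bufferCreateInfo = VULKAN_HPP_NAMESPACE::BufferCreateInfo()
  //                             .setSize( 65536 )
  //                             .setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eStorageBuffer )
  //                             .setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode::eExclusive );
  //   VULKAN_HPP_NAMESPACE::Buffer buffer = device.createBuffer( bufferCreateInfo );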

  struct BufferDeviceAddressCreateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceAddress( deviceAddress_ )
    {}

    VULKAN_HPP_CONSTEXPR
      BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast<BufferDeviceAddressCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT &
                            operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>( &rhs );
      return *this;
    }

    BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferDeviceAddressCreateInfoEXT &
      setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT *>( this );
    }

    operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default;
#else
    bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress );
    }

    bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eBufferDeviceAddressCreateInfoEXT;
    const void *                        pNext         = {};
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
  };
  static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferDeviceAddressCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferDeviceAddressCreateInfoEXT>
  {
    using Type = BufferDeviceAddressCreateInfoEXT;
  };

  struct BufferDeviceAddressInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferDeviceAddressInfo( *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo &
                            operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>( &rhs );
      return *this;
    }

    BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferDeviceAddressInfo *>( this );
    }

    operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferDeviceAddressInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferDeviceAddressInfo const & ) const = default;
#else
    bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
    }

    bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eBufferDeviceAddressInfo;
    const void *                        pNext  = {};
    VULKAN_HPP_NAMESPACE::Buffer        buffer = {};
  };
  static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
  {
    using Type = BufferDeviceAddressInfo;
  };
  using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
  using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
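
  // Usage sketch (illustrative comment only; assumes a VULKAN_HPP_NAMESPACE::Device `device` and a
  // buffer created with BufferUsageFlagBits::eShaderDeviceAddress):
  //   VULKAN_HPP_NAMESPACE::DeviceAddress address =
  //     device.getBufferAddress( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo( buffer ) );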

  struct BufferImageCopy
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize             bufferOffset_      = {},
                                          uint32_t                                     bufferRowLength_   = {},
                                          uint32_t                                     bufferImageHeight_ = {},
                                          VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_  = {},
                                          VULKAN_HPP_NAMESPACE::Offset3D               imageOffset_       = {},
                                          VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferOffset( bufferOffset_ )
      , bufferRowLength( bufferRowLength_ )
      , bufferImageHeight( bufferImageHeight_ )
      , imageSubresource( imageSubresource_ )
      , imageOffset( imageOffset_ )
      , imageExtent( imageExtent_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>( &rhs );
      return *this;
    }

    BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferOffset = bufferOffset_;
      return *this;
    }

    BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferRowLength = bufferRowLength_;
      return *this;
    }

    BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferImageHeight = bufferImageHeight_;
      return *this;
    }

    BufferImageCopy &
      setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSubresource = imageSubresource_;
      return *this;
    }

    BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOffset = imageOffset_;
      return *this;
    }

    BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }

    operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferImageCopy *>( this );
    }

    operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferImageCopy *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferImageCopy const & ) const = default;
#else
    bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) &&
             ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) &&
             ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent );
    }

    bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize             bufferOffset      = {};
    uint32_t                                     bufferRowLength   = {};
    uint32_t                                     bufferImageHeight = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource  = {};
    VULKAN_HPP_NAMESPACE::Offset3D               imageOffset       = {};
    VULKAN_HPP_NAMESPACE::Extent3D               imageExtent       = {};
  };
  static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
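
  // Usage sketch (illustrative comment only; assumes a recording VULKAN_HPP_NAMESPACE::CommandBuffer
  // `cmd`, a staging buffer `stagingBuffer`, and an image `image` of extent `width` x `height` in
  // eTransferDstOptimal layout):
  //   VULKAN_HPP_NAMESPACE::BufferImageCopy region(
  //     0, 0, 0,
  //     VULKAN_HPP_NAMESPACE::ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 ),
  //     VULKAN_HPP_NAMESPACE::Offset3D( 0, 0, 0 ),
  //     VULKAN_HPP_NAMESPACE::Extent3D( width, height, 1 ) );
  //   cmd.copyBufferToImage( stagingBuffer, image, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal, region );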

  struct BufferImageCopy2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( VULKAN_HPP_NAMESPACE::DeviceSize             bufferOffset_      = {},
                                              uint32_t                                     bufferRowLength_   = {},
                                              uint32_t                                     bufferImageHeight_ = {},
                                              VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_  = {},
                                              VULKAN_HPP_NAMESPACE::Offset3D               imageOffset_       = {},
                                              VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
      : bufferOffset( bufferOffset_ )
      , bufferRowLength( bufferRowLength_ )
      , bufferImageHeight( bufferImageHeight_ )
      , imageSubresource( imageSubresource_ )
      , imageOffset( imageOffset_ )
      , imageExtent( imageExtent_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferImageCopy2KHR( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferImageCopy2KHR( *reinterpret_cast<BufferImageCopy2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2KHR &
                            operator=( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferImageCopy2KHR & operator=( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR const *>( &rhs );
      return *this;
    }

    BufferImageCopy2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferImageCopy2KHR & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferOffset = bufferOffset_;
      return *this;
    }

    BufferImageCopy2KHR & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferRowLength = bufferRowLength_;
      return *this;
    }

    BufferImageCopy2KHR & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferImageHeight = bufferImageHeight_;
      return *this;
    }

    BufferImageCopy2KHR &
      setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSubresource = imageSubresource_;
      return *this;
    }

    BufferImageCopy2KHR & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      imageOffset = imageOffset_;
      return *this;
    }

    BufferImageCopy2KHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }

    operator VkBufferImageCopy2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferImageCopy2KHR *>( this );
    }

    operator VkBufferImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferImageCopy2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferImageCopy2KHR const & ) const = default;
#else
    bool operator==( BufferImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) &&
             ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) &&
             ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) &&
             ( imageExtent == rhs.imageExtent );
    }

    bool operator!=( BufferImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType             = StructureType::eBufferImageCopy2KHR;
    const void *                                 pNext             = {};
    VULKAN_HPP_NAMESPACE::DeviceSize             bufferOffset      = {};
    uint32_t                                     bufferRowLength   = {};
    uint32_t                                     bufferImageHeight = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource  = {};
    VULKAN_HPP_NAMESPACE::Offset3D               imageOffset       = {};
    VULKAN_HPP_NAMESPACE::Extent3D               imageExtent       = {};
  };
  static_assert( sizeof( BufferImageCopy2KHR ) == sizeof( VkBufferImageCopy2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferImageCopy2KHR>
  {
    using Type = BufferImageCopy2KHR;
  };

  struct BufferMemoryBarrier
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_       = {},
                                              VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_       = {},
                                              uint32_t                          srcQueueFamilyIndex_ = {},
                                              uint32_t                          dstQueueFamilyIndex_ = {},
                                              VULKAN_HPP_NAMESPACE::Buffer      buffer_              = {},
                                              VULKAN_HPP_NAMESPACE::DeviceSize  offset_              = {},
                                              VULKAN_HPP_NAMESPACE::DeviceSize  size_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcAccessMask( srcAccessMask_ )
      , dstAccessMask( dstAccessMask_ )
      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
      , buffer( buffer_ )
      , offset( offset_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier &
                            operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>( &rhs );
      return *this;
    }

    BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferMemoryBarrier *>( this );
    }

    operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferMemoryBarrier *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferMemoryBarrier const & ) const = default;
#else
    bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) &&
             ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
             ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) &&
             ( offset == rhs.offset ) && ( size == rhs.size );
    }

    bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eBufferMemoryBarrier;
    const void *                        pNext               = {};
    VULKAN_HPP_NAMESPACE::AccessFlags   srcAccessMask       = {};
    VULKAN_HPP_NAMESPACE::AccessFlags   dstAccessMask       = {};
    uint32_t                            srcQueueFamilyIndex = {};
    uint32_t                            dstQueueFamilyIndex = {};
    VULKAN_HPP_NAMESPACE::Buffer        buffer              = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    offset              = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    size                = {};
  };
  static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
  {
    using Type = BufferMemoryBarrier;
  };
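
  // Usage sketch (illustrative comment only; assumes a recording CommandBuffer `cmd` and a Buffer
  // `buffer` that is written by a transfer and then read in a compute shader):
  //   VULKAN_HPP_NAMESPACE::BufferMemoryBarrier barrier( VULKAN_HPP_NAMESPACE::AccessFlagBits::eTransferWrite,
  //                                                      VULKAN_HPP_NAMESPACE::AccessFlagBits::eShaderRead,
  //                                                      VK_QUEUE_FAMILY_IGNORED,
  //                                                      VK_QUEUE_FAMILY_IGNORED,
  //                                                      buffer,
  //                                                      0,
  //                                                      VK_WHOLE_SIZE );
  //   cmd.pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTransfer,
  //                        VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eComputeShader,
  //                        {}, nullptr, barrier, nullptr );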

  struct BufferMemoryBarrier2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_  = {},
                                                  VULKAN_HPP_NAMESPACE::AccessFlags2KHR        srcAccessMask_ = {},
                                                  VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_  = {},
                                                  VULKAN_HPP_NAMESPACE::AccessFlags2KHR        dstAccessMask_ = {},
                                                  uint32_t                         srcQueueFamilyIndex_       = {},
                                                  uint32_t                         dstQueueFamilyIndex_       = {},
                                                  VULKAN_HPP_NAMESPACE::Buffer     buffer_                    = {},
                                                  VULKAN_HPP_NAMESPACE::DeviceSize offset_                    = {},
                                                  VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcStageMask( srcStageMask_ )
      , srcAccessMask( srcAccessMask_ )
      , dstStageMask( dstStageMask_ )
      , dstAccessMask( dstAccessMask_ )
      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
      , buffer( buffer_ )
      , offset( offset_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2KHR( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferMemoryBarrier2KHR( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferMemoryBarrier2KHR( *reinterpret_cast<BufferMemoryBarrier2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2KHR &
                            operator=( BufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferMemoryBarrier2KHR & operator=( VkBufferMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR const *>( &rhs );
      return *this;
    }

    BufferMemoryBarrier2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferMemoryBarrier2KHR &
      setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    BufferMemoryBarrier2KHR &
      setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    BufferMemoryBarrier2KHR &
      setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    BufferMemoryBarrier2KHR &
      setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    BufferMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    BufferMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    BufferMemoryBarrier2KHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    BufferMemoryBarrier2KHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    BufferMemoryBarrier2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkBufferMemoryBarrier2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferMemoryBarrier2KHR *>( this );
    }

    operator VkBufferMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferMemoryBarrier2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferMemoryBarrier2KHR const & ) const = default;
#else
    bool operator==( BufferMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) &&
             ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) &&
             ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
             ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) &&
             ( offset == rhs.offset ) && ( size == rhs.size );
    }

    bool operator!=( BufferMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType               = StructureType::eBufferMemoryBarrier2KHR;
    const void *                                 pNext               = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask        = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2KHR        srcAccessMask       = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask        = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2KHR        dstAccessMask       = {};
    uint32_t                                     srcQueueFamilyIndex = {};
    uint32_t                                     dstQueueFamilyIndex = {};
    VULKAN_HPP_NAMESPACE::Buffer                 buffer              = {};
    VULKAN_HPP_NAMESPACE::DeviceSize             offset              = {};
    VULKAN_HPP_NAMESPACE::DeviceSize             size                = {};
  };
  static_assert( sizeof( BufferMemoryBarrier2KHR ) == sizeof( VkBufferMemoryBarrier2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferMemoryBarrier2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferMemoryBarrier2KHR>
  {
    using Type = BufferMemoryBarrier2KHR;
  };
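
  // Usage sketch for VK_KHR_synchronization2 (illustrative comment only; assumes the extension is
  // enabled, plus a recording CommandBuffer `cmd` and a Buffer `buffer`):
  //   VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR barrier( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2KHR::eCopy,
  //                                                          VULKAN_HPP_NAMESPACE::AccessFlagBits2KHR::eTransferWrite,
  //                                                          VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2KHR::eComputeShader,
  //                                                          VULKAN_HPP_NAMESPACE::AccessFlagBits2KHR::eShaderRead,
  //                                                          VK_QUEUE_FAMILY_IGNORED,
  //                                                          VK_QUEUE_FAMILY_IGNORED,
  //                                                          buffer,
  //                                                          0,
  //                                                          VK_WHOLE_SIZE );
  //   VULKAN_HPP_NAMESPACE::DependencyInfoKHR dependencyInfo;
  //   dependencyInfo.setBufferMemoryBarriers( barrier );
  //   cmd.pipelineBarrier2KHR( dependencyInfo );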

  struct BufferMemoryRequirementsInfo2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
    {}

    VULKAN_HPP_CONSTEXPR
      BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 &
                            operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
      return *this;
    }

    BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( this );
    }

    operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferMemoryRequirementsInfo2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default;
#else
    bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
    }

    bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eBufferMemoryRequirementsInfo2;
    const void *                        pNext  = {};
    VULKAN_HPP_NAMESPACE::Buffer        buffer = {};
  };
  static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferMemoryRequirementsInfo2>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
  {
    using Type = BufferMemoryRequirementsInfo2;
  };
  using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
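
  // Usage sketch (illustrative comment only; assumes a VULKAN_HPP_NAMESPACE::Device `device` and a
  // Buffer `buffer`):
  //   VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements =
  //     device.getBufferMemoryRequirements2( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2( buffer ) );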

  struct BufferOpaqueCaptureAddressCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eBufferOpaqueCaptureAddressCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
      : opaqueCaptureAddress( opaqueCaptureAddress_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo &
                            operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferOpaqueCaptureAddressCreateInfo &
      operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
      return *this;
    }

    BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      opaqueCaptureAddress = opaqueCaptureAddress_;
      return *this;
    }

    operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo *>( this );
    }

    operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default;
#else
    bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
    }

    bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
    const void *                        pNext                = {};
    uint64_t                            opaqueCaptureAddress = {};
  };
  static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferOpaqueCaptureAddressCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
  {
    using Type = BufferOpaqueCaptureAddressCreateInfo;
  };
  using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;

  struct BufferViewCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_  = {},
                            VULKAN_HPP_NAMESPACE::Buffer                buffer_ = {},
                            VULKAN_HPP_NAMESPACE::Format     format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                            VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
                            VULKAN_HPP_NAMESPACE::DeviceSize range_  = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , buffer( buffer_ )
      , format( format_ )
      , offset( offset_ )
      , range( range_ )
    {}

    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo &
                            operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
      return *this;
    }

    BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
    {
      range = range_;
      return *this;
    }

    operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkBufferViewCreateInfo *>( this );
    }

    operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkBufferViewCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferViewCreateInfo const & ) const = default;
#else
    bool operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) &&
             ( format == rhs.format ) && ( offset == rhs.offset ) && ( range == rhs.range );
    }

    bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType  = StructureType::eBufferViewCreateInfo;
    const void *                                pNext  = {};
    VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags  = {};
    VULKAN_HPP_NAMESPACE::Buffer                buffer = {};
    VULKAN_HPP_NAMESPACE::Format                format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::DeviceSize            offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize            range  = {};
  };
  static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
  {
    using Type = BufferViewCreateInfo;
  };
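
  // Usage sketch (illustrative comment only; assumes a Device `device` and a Buffer `texelBuffer`
  // created with a texel-buffer usage flag):
  //   VULKAN_HPP_NAMESPACE::BufferView bufferView = device.createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo(
  //     {}, texelBuffer, VULKAN_HPP_NAMESPACE::Format::eR32Sfloat, 0, VK_WHOLE_SIZE ) );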

  struct CalibratedTimestampInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ =
                                    VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT
      : timeDomain( timeDomain_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : CalibratedTimestampInfoEXT( *reinterpret_cast<CalibratedTimestampInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT &
                            operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>( &rhs );
      return *this;
    }

    CalibratedTimestampInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
    {
      timeDomain = timeDomain_;
      return *this;
    }

    operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( this );
    }

    operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCalibratedTimestampInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CalibratedTimestampInfoEXT const & ) const = default;
#else
    bool operator==( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain );
    }

    bool operator!=( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::eCalibratedTimestampInfoEXT;
    const void *                        pNext      = {};
    VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
  };
  static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CalibratedTimestampInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCalibratedTimestampInfoEXT>
  {
    using Type = CalibratedTimestampInfoEXT;
  };
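
  // Usage sketch for VK_EXT_calibrated_timestamps (illustrative comment only; with the extension
  // enabled, one or more of these infos is passed to Device::getCalibratedTimestampsEXT, which
  // returns the timestamps together with the maximum deviation):
  //   VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT timestampInfo( VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice );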

  struct CheckpointData2NV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage_ = {},
                                            void * pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT
      : stage( stage_ )
      , pCheckpointMarker( pCheckpointMarker_ )
    {}

    VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CheckpointData2NV( *reinterpret_cast<CheckpointData2NV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CheckpointData2NV &
                            operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointData2NV const *>( &rhs );
      return *this;
    }

    operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCheckpointData2NV *>( this );
    }

    operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCheckpointData2NV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CheckpointData2NV const & ) const = default;
#else
    bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) &&
             ( pCheckpointMarker == rhs.pCheckpointMarker );
    }

    bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType             = StructureType::eCheckpointData2NV;
    void *                                       pNext             = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage             = {};
    void *                                       pCheckpointMarker = {};
  };
  static_assert( sizeof( CheckpointData2NV ) == sizeof( VkCheckpointData2NV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CheckpointData2NV>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCheckpointData2NV>
  {
    using Type = CheckpointData2NV;
  };

  struct CheckpointDataNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CheckpointDataNV(
      VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe,
      void *                                      pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT
      : stage( stage_ )
      , pCheckpointMarker( pCheckpointMarker_ )
    {}

    VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CheckpointDataNV( *reinterpret_cast<CheckpointDataNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>( &rhs );
      return *this;
    }

    operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCheckpointDataNV *>( this );
    }

    operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCheckpointDataNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CheckpointDataNV const & ) const = default;
#else
    bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) &&
             ( pCheckpointMarker == rhs.pCheckpointMarker );
    }

    bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType = StructureType::eCheckpointDataNV;
    void *                                      pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
    void *                                      pCheckpointMarker = {};
  };
  static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCheckpointDataNV>
  {
    using Type = CheckpointDataNV;
  };
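
  // Usage sketch for VK_NV_device_diagnostic_checkpoints (illustrative comment only; assumes a
  // VULKAN_HPP_NAMESPACE::Queue `queue` on a device with the extension enabled):
  //   std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> checkpoints = queue.getCheckpointDataNV();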

  union ClearColorValue
  {
    ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
    }

    ClearColorValue( const std::array<float, 4> & float32_ = {} ) : float32( float32_ ) {}

    ClearColorValue( const std::array<int32_t, 4> & int32_ ) : int32( int32_ ) {}

    ClearColorValue( const std::array<uint32_t, 4> & uint32_ ) : uint32( uint32_ ) {}

    ClearColorValue & setFloat32( std::array<float, 4> float32_ ) VULKAN_HPP_NOEXCEPT
    {
      float32 = float32_;
      return *this;
    }

    ClearColorValue & setInt32( std::array<int32_t, 4> int32_ ) VULKAN_HPP_NOEXCEPT
    {
      int32 = int32_;
      return *this;
    }

    ClearColorValue & setUint32( std::array<uint32_t, 4> uint32_ ) VULKAN_HPP_NOEXCEPT
    {
      uint32 = uint32_;
      return *this;
    }

    VULKAN_HPP_NAMESPACE::ClearColorValue &
      operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
      return *this;
    }

    operator VkClearColorValue const &() const
    {
      return *reinterpret_cast<const VkClearColorValue *>( this );
    }

    operator VkClearColorValue &()
    {
      return *reinterpret_cast<VkClearColorValue *>( this );
    }

    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4>    float32;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4>  int32;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 4> uint32;
  };
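
  // Usage sketch (illustrative comment only; assumes a recording CommandBuffer `cmd` and an Image
  // `image` in eTransferDstOptimal layout):
  //   VULKAN_HPP_NAMESPACE::ClearColorValue clearColor( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } );
  //   VULKAN_HPP_NAMESPACE::ImageSubresourceRange range( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
  //   cmd.clearColorImage( image, VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal, clearColor, range );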

  struct ClearDepthStencilValue
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
      : depth( depth_ )
      , stencil( stencil_ )
    {}

    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClearDepthStencilValue( *reinterpret_cast<ClearDepthStencilValue const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue &
                            operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
      return *this;
    }

    ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
    {
      depth = depth_;
      return *this;
    }

    ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
    {
      stencil = stencil_;
      return *this;
    }

    operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClearDepthStencilValue *>( this );
    }

    operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClearDepthStencilValue *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClearDepthStencilValue const & ) const = default;
#else
    bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( depth == rhs.depth ) && ( stencil == rhs.stencil );
    }

    bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float    depth   = {};
    uint32_t stencil = {};
  };
  static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );

  union ClearValue
  {
    ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
    }

    ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {}

    ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {}

    ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
    {
      color = color_;
      return *this;
    }

    ClearValue &
      setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
    {
      depthStencil = depthStencil_;
      return *this;
    }

    VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
      return *this;
    }

    operator VkClearValue const &() const
    {
      return *reinterpret_cast<const VkClearValue *>( this );
    }

    operator VkClearValue &()
    {
      return *reinterpret_cast<VkClearValue *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    VULKAN_HPP_NAMESPACE::ClearColorValue        color;
    VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
#else
    VkClearColorValue        color;
    VkClearDepthStencilValue depthStencil;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };
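
  // Usage sketch (illustrative comment only): one ClearValue per attachment is typically provided
  // through RenderPassBeginInfo::pClearValues, e.g.
  //   VULKAN_HPP_NAMESPACE::ClearValue depthClear( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue( 1.0f, 0 ) );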

  struct ClearAttachment
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_      = {},
                     uint32_t                               colorAttachment_ = {},
                     VULKAN_HPP_NAMESPACE::ClearValue       clearValue_      = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask( aspectMask_ )
      , colorAttachment( colorAttachment_ )
      , clearValue( clearValue_ )
    {}

    ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
      : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
      return *this;
    }

    ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachment = colorAttachment_;
      return *this;
    }

    ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
    {
      clearValue = clearValue_;
      return *this;
    }

    operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClearAttachment *>( this );
    }

    operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClearAttachment *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask      = {};
    uint32_t                               colorAttachment = {};
    VULKAN_HPP_NAMESPACE::ClearValue       clearValue      = {};
  };
  static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ClearAttachment>::value, "struct wrapper is not a standard layout!" );

  struct ClearRect
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_           = {},
                                    uint32_t                     baseArrayLayer_ = {},
                                    uint32_t                     layerCount_     = {} ) VULKAN_HPP_NOEXCEPT
      : rect( rect_ )
      , baseArrayLayer( baseArrayLayer_ )
      , layerCount( layerCount_ )
    {}

    VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
      return *this;
    }

    ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
    {
      rect = rect_;
      return *this;
    }

    ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      baseArrayLayer = baseArrayLayer_;
      return *this;
    }

    ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }

    operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkClearRect *>( this );
    }

    operator VkClearRect &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkClearRect *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ClearRect const & ) const = default;
#else
    bool                     operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount );
    }

    bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Rect2D rect           = {};
    uint32_t                     baseArrayLayer = {};
    uint32_t                     layerCount     = {};
  };
  static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ClearRect>::value, "struct wrapper is not a standard layout!" );
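  // Example (illustrative): a ClearRect bounds a clear to a region and layer range. Together with a
  // ClearAttachment it feeds CommandBuffer::clearAttachments, declared later in this header;
  // "commandBuffer", "attachment", and the 640x480 extent are placeholders.
  //
  //   vk::ClearRect rect( vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( 640, 480 ) ),
  //                       0 /*baseArrayLayer*/,
  //                       1 /*layerCount*/ );
  //   commandBuffer.clearAttachments( attachment, rect );  // inside a render pass instance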

  struct CoarseSampleLocationNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT
      : pixelX( pixelX_ )
      , pixelY( pixelY_ )
      , sample( sample_ )
    {}

    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CoarseSampleLocationNV( *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV &
                            operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>( &rhs );
      return *this;
    }

    CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
    {
      pixelX = pixelX_;
      return *this;
    }

    CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
    {
      pixelY = pixelY_;
      return *this;
    }

    CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
    {
      sample = sample_;
      return *this;
    }

    operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCoarseSampleLocationNV *>( this );
    }

    operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCoarseSampleLocationNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CoarseSampleLocationNV const & ) const = default;
#else
    bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample );
    }

    bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t pixelX = {};
    uint32_t pixelY = {};
    uint32_t sample = {};
  };
  static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );

  struct CoarseSampleOrderCustomNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(
      VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ =
        VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations,
      uint32_t                                             sampleCount_         = {},
      uint32_t                                             sampleLocationCount_ = {},
      const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_    = {} ) VULKAN_HPP_NOEXCEPT
      : shadingRate( shadingRate_ )
      , sampleCount( sampleCount_ )
      , sampleLocationCount( sampleLocationCount_ )
      , pSampleLocations( pSampleLocations_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CoarseSampleOrderCustomNV( *reinterpret_cast<CoarseSampleOrderCustomNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CoarseSampleOrderCustomNV(
      VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_,
      uint32_t                                        sampleCount_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const &
        sampleLocations_ )
      : shadingRate( shadingRate_ )
      , sampleCount( sampleCount_ )
      , sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) )
      , pSampleLocations( sampleLocations_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV &
                            operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>( &rhs );
      return *this;
    }

    CoarseSampleOrderCustomNV &
      setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRate = shadingRate_;
      return *this;
    }

    CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleCount = sampleCount_;
      return *this;
    }

    CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationCount = sampleLocationCount_;
      return *this;
    }

    CoarseSampleOrderCustomNV &
      setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampleLocations = pSampleLocations_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CoarseSampleOrderCustomNV & setSampleLocations(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const &
        sampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
      pSampleLocations    = sampleLocations_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( this );
    }

    operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCoarseSampleOrderCustomNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default;
#else
    bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) &&
             ( sampleLocationCount == rhs.sampleLocationCount ) && ( pSampleLocations == rhs.pSampleLocations );
    }

    bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate =
      VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
    uint32_t                                             sampleCount         = {};
    uint32_t                                             sampleLocationCount = {};
    const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations    = {};
  };
  static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CoarseSampleOrderCustomNV>::value,
                 "struct wrapper is not a standard layout!" );

  class CommandPool
  {
  public:
    using CType = VkCommandPool;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;

  public:
    VULKAN_HPP_CONSTEXPR         CommandPool() = default;
    VULKAN_HPP_CONSTEXPR         CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
      : m_commandPool( commandPool )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
    {
      m_commandPool = commandPool;
      return *this;
    }
#endif

    CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_commandPool = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandPool const & ) const = default;
#else
    bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_commandPool == rhs.m_commandPool;
    }

    bool operator!=( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_commandPool != rhs.m_commandPool;
    }

    bool operator<( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_commandPool < rhs.m_commandPool;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT
    {
      return m_commandPool;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_commandPool != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_commandPool == VK_NULL_HANDLE;
    }

  private:
    VkCommandPool m_commandPool = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eCommandPool>
  {
    using type = VULKAN_HPP_NAMESPACE::CommandPool;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool>
  {
    using Type = VULKAN_HPP_NAMESPACE::CommandPool;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool>
  {
    using Type = VULKAN_HPP_NAMESPACE::CommandPool;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandPool>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
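  // Example (illustrative): handle wrappers such as CommandPool are thin value types around the
  // corresponding Vk handle; they compare, order, and test against the null handle without owning
  // the underlying object.
  //
  //   vk::CommandPool pool;                 // default-constructed: the null handle
  //   if ( !pool ) { /* not yet created */ }
  //   pool = nullptr;                       // reset back to the null handle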

  struct CommandBufferAllocateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(
      VULKAN_HPP_NAMESPACE::CommandPool        commandPool_        = {},
      VULKAN_HPP_NAMESPACE::CommandBufferLevel level_              = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary,
      uint32_t                                 commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : commandPool( commandPool_ )
      , level( level_ )
      , commandBufferCount( commandBufferCount_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferAllocateInfo( *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo &
                            operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>( &rhs );
      return *this;
    }

    CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
    {
      commandPool = commandPool_;
      return *this;
    }

    CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
    {
      level = level_;
      return *this;
    }

    CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount = commandBufferCount_;
      return *this;
    }

    operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferAllocateInfo *>( this );
    }

    operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferAllocateInfo const & ) const = default;
#else
    bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) &&
             ( level == rhs.level ) && ( commandBufferCount == rhs.commandBufferCount );
    }

    bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType              = StructureType::eCommandBufferAllocateInfo;
    const void *                             pNext              = {};
    VULKAN_HPP_NAMESPACE::CommandPool        commandPool        = {};
    VULKAN_HPP_NAMESPACE::CommandBufferLevel level              = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
    uint32_t                                 commandBufferCount = {};
  };
  static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CommandBufferAllocateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
  {
    using Type = CommandBufferAllocateInfo;
  };
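  // Example (illustrative, assuming enhanced mode with exceptions enabled, and that "device" is an
  // existing vk::Device and "commandPool" an existing vk::CommandPool): allocate two primary
  // command buffers from the pool. Device::allocateCommandBuffers is declared later in this header.
  //
  //   vk::CommandBufferAllocateInfo  allocateInfo( commandPool, vk::CommandBufferLevel::ePrimary, 2 );
  //   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocateInfo );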

  class RenderPass
  {
  public:
    using CType = VkRenderPass;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;

  public:
    VULKAN_HPP_CONSTEXPR         RenderPass() = default;
    VULKAN_HPP_CONSTEXPR         RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
    {
      m_renderPass = renderPass;
      return *this;
    }
#endif

    RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_renderPass = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPass const & ) const = default;
#else
    bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_renderPass == rhs.m_renderPass;
    }

    bool operator!=( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_renderPass != rhs.m_renderPass;
    }

    bool operator<( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_renderPass < rhs.m_renderPass;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT
    {
      return m_renderPass;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_renderPass != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_renderPass == VK_NULL_HANDLE;
    }

  private:
    VkRenderPass m_renderPass = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eRenderPass>
  {
    using type = VULKAN_HPP_NAMESPACE::RenderPass;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass>
  {
    using Type = VULKAN_HPP_NAMESPACE::RenderPass;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass>
  {
    using Type = VULKAN_HPP_NAMESPACE::RenderPass;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::RenderPass>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  class Framebuffer
  {
  public:
    using CType = VkFramebuffer;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;

  public:
    VULKAN_HPP_CONSTEXPR         Framebuffer() = default;
    VULKAN_HPP_CONSTEXPR         Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
      : m_framebuffer( framebuffer )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Framebuffer & operator=( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
    {
      m_framebuffer = framebuffer;
      return *this;
    }
#endif

    Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_framebuffer = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Framebuffer const & ) const = default;
#else
    bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_framebuffer == rhs.m_framebuffer;
    }

    bool operator!=( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_framebuffer != rhs.m_framebuffer;
    }

    bool operator<( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_framebuffer < rhs.m_framebuffer;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT
    {
      return m_framebuffer;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_framebuffer != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_framebuffer == VK_NULL_HANDLE;
    }

  private:
    VkFramebuffer m_framebuffer = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eFramebuffer>
  {
    using type = VULKAN_HPP_NAMESPACE::Framebuffer;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer>
  {
    using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer>
  {
    using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Framebuffer>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct CommandBufferInheritanceInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(
      VULKAN_HPP_NAMESPACE::RenderPass                  renderPass_           = {},
      uint32_t                                          subpass_              = {},
      VULKAN_HPP_NAMESPACE::Framebuffer                 framebuffer_          = {},
      VULKAN_HPP_NAMESPACE::Bool32                      occlusionQueryEnable_ = {},
      VULKAN_HPP_NAMESPACE::QueryControlFlags           queryFlags_           = {},
      VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_   = {} ) VULKAN_HPP_NOEXCEPT
      : renderPass( renderPass_ )
      , subpass( subpass_ )
      , framebuffer( framebuffer_ )
      , occlusionQueryEnable( occlusionQueryEnable_ )
      , queryFlags( queryFlags_ )
      , pipelineStatistics( pipelineStatistics_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferInheritanceInfo( *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo &
                            operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>( &rhs );
      return *this;
    }

    CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
    {
      subpass = subpass_;
      return *this;
    }

    CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      framebuffer = framebuffer_;
      return *this;
    }

    CommandBufferInheritanceInfo &
      setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      occlusionQueryEnable = occlusionQueryEnable_;
      return *this;
    }

    CommandBufferInheritanceInfo &
      setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      queryFlags = queryFlags_;
      return *this;
    }

    CommandBufferInheritanceInfo &
      setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStatistics = pipelineStatistics_;
      return *this;
    }

    operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceInfo *>( this );
    }

    operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceInfo const & ) const = default;
#else
    bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) &&
             ( subpass == rhs.subpass ) && ( framebuffer == rhs.framebuffer ) &&
             ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) &&
             ( pipelineStatistics == rhs.pipelineStatistics );
    }

    bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType       = StructureType::eCommandBufferInheritanceInfo;
    const void *                                      pNext       = {};
    VULKAN_HPP_NAMESPACE::RenderPass                  renderPass  = {};
    uint32_t                                          subpass     = {};
    VULKAN_HPP_NAMESPACE::Framebuffer                 framebuffer = {};
    VULKAN_HPP_NAMESPACE::Bool32                      occlusionQueryEnable = {};
    VULKAN_HPP_NAMESPACE::QueryControlFlags           queryFlags           = {};
    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics   = {};
  };
  static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CommandBufferInheritanceInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
  {
    using Type = CommandBufferInheritanceInfo;
  };
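  // Example (illustrative): inheritance info is only consulted for secondary command buffers; it
  // names the render pass (and optionally the framebuffer) the secondary buffer will execute
  // within. "renderPass" and "framebuffer" are placeholders for previously created handles.
  //
  //   vk::CommandBufferInheritanceInfo inheritanceInfo( renderPass, 0 /*subpass*/, framebuffer );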

  struct CommandBufferBeginInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(
      VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags              flags_            = {},
      const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pInheritanceInfo( pInheritanceInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo &
                            operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
      return *this;
    }

    CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    CommandBufferBeginInfo & setPInheritanceInfo(
      const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pInheritanceInfo = pInheritanceInfo_;
      return *this;
    }

    operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferBeginInfo *>( this );
    }

    operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferBeginInfo const & ) const = default;
#else
    bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( pInheritanceInfo == rhs.pInheritanceInfo );
    }

    bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                        sType = StructureType::eCommandBufferBeginInfo;
    const void *                                               pNext = {};
    VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags              flags = {};
    const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {};
  };
  static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
  {
    using Type = CommandBufferBeginInfo;
  };
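  // Example (illustrative): begin recording a primary command buffer for a single submission;
  // CommandBuffer::begin and end are declared later in this header, and "commandBuffer" is a
  // placeholder. For secondary command buffers, point pInheritanceInfo at a
  // CommandBufferInheritanceInfo.
  //
  //   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
  //   commandBuffer.begin( beginInfo );
  //   // ... record commands ...
  //   commandBuffer.end();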

  struct CommandBufferInheritanceConditionalRenderingInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(
      VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT
      : conditionalRenderingEnable( conditionalRenderingEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(
      CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferInheritanceConditionalRenderingInfoEXT(
      VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferInheritanceConditionalRenderingInfoEXT(
          *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT &
                            operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferInheritanceConditionalRenderingInfoEXT &
      operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
      return *this;
    }

    CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CommandBufferInheritanceConditionalRenderingInfoEXT &
      setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      conditionalRenderingEnable = conditionalRenderingEnable_;
      return *this;
    }

    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
    }

    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default;
#else
    bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
    }

    bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        conditionalRenderingEnable = {};
  };
  static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) ==
                   sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CommandBufferInheritanceConditionalRenderingInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
  {
    using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
  };
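  // Example (illustrative, VK_EXT_conditional_rendering): this struct extends
  // CommandBufferInheritanceInfo through its pNext chain, allowing a secondary command buffer to be
  // executed while conditional rendering is active in the primary. "inheritanceInfo" is a
  // placeholder for the CommandBufferInheritanceInfo being extended.
  //
  //   vk::CommandBufferInheritanceConditionalRenderingInfoEXT conditionalInfo( VK_TRUE );
  //   inheritanceInfo.setPNext( &conditionalInfo );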

  struct CommandBufferInheritanceRenderPassTransformInfoQCOM
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ =
        VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
      VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {} ) VULKAN_HPP_NOEXCEPT
      : transform( transform_ )
      , renderArea( renderArea_ )
    {}

    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(
      CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferInheritanceRenderPassTransformInfoQCOM(
      VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferInheritanceRenderPassTransformInfoQCOM(
          *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM &
                            operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferInheritanceRenderPassTransformInfoQCOM &
      operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
      return *this;
    }

    CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CommandBufferInheritanceRenderPassTransformInfoQCOM &
      setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    CommandBufferInheritanceRenderPassTransformInfoQCOM &
      setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
    {
      renderArea = renderArea_;
      return *this;
    }

    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
    }

    operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default;
#else
    bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) &&
             ( renderArea == rhs.renderArea );
    }

    bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform =
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
  };
  static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) ==
                   sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CommandBufferInheritanceRenderPassTransformInfoQCOM>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
  {
    using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
  };
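  // Example (illustrative, VK_QCOM_render_pass_transform): also chained into
  // CommandBufferInheritanceInfo::pNext, this tells a secondary command buffer which surface
  // pre-rotation transform and render area the inherited render pass uses; values below are
  // placeholders.
  //
  //   vk::CommandBufferInheritanceRenderPassTransformInfoQCOM transformInfo(
  //     vk::SurfaceTransformFlagBitsKHR::eRotate90,
  //     vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( 480, 640 ) ) );
  //   inheritanceInfo.setPNext( &transformInfo );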

  struct Viewport
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Viewport( float x_        = {},
                                   float y_        = {},
                                   float width_    = {},
                                   float height_   = {},
                                   float minDepth_ = {},
                                   float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
      , width( width_ )
      , height( height_ )
      , minDepth( minDepth_ )
      , maxDepth( maxDepth_ )
    {}

    VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) ) {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
      return *this;
    }

    Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      minDepth = minDepth_;
      return *this;
    }

    Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      maxDepth = maxDepth_;
      return *this;
    }

    operator VkViewport const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViewport *>( this );
    }

    operator VkViewport &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViewport *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Viewport const & ) const = default;
#else
    bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) &&
             ( minDepth == rhs.minDepth ) && ( maxDepth == rhs.maxDepth );
    }

    bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float x        = {};
    float y        = {};
    float width    = {};
    float height   = {};
    float minDepth = {};
    float maxDepth = {};
  };
  static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
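  // Example (illustrative): a full-framebuffer viewport with the standard [0,1] depth range;
  // CommandBuffer::setViewport is declared later in this header and "commandBuffer" and the
  // 640x480 size are placeholders.
  //
  //   vk::Viewport viewport( 0.0f, 0.0f, 640.0f, 480.0f, 0.0f /*minDepth*/, 1.0f /*maxDepth*/ );
  //   commandBuffer.setViewport( 0 /*firstViewport*/, viewport );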

  struct CommandBufferInheritanceViewportScissorInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eCommandBufferInheritanceViewportScissorInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(
      VULKAN_HPP_NAMESPACE::Bool32           viewportScissor2D_  = {},
      uint32_t                               viewportDepthCount_ = {},
      const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_    = {} ) VULKAN_HPP_NOEXCEPT
      : viewportScissor2D( viewportScissor2D_ )
      , viewportDepthCount( viewportDepthCount_ )
      , pViewportDepths( pViewportDepths_ )
    {}

    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV(
      CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : CommandBufferInheritanceViewportScissorInfoNV(
          *reinterpret_cast<CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV &
                            operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferInheritanceViewportScissorInfoNV &
      operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV const *>( &rhs );
      return *this;
    }

    CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CommandBufferInheritanceViewportScissorInfoNV &
      setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportScissor2D = viewportScissor2D_;
      return *this;
    }

    CommandBufferInheritanceViewportScissorInfoNV &
      setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportDepthCount = viewportDepthCount_;
      return *this;
    }

    CommandBufferInheritanceViewportScissorInfoNV &
      setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewportDepths = pViewportDepths_;
      return *this;
    }

    operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
    }

    operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferInheritanceViewportScissorInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default;
#else
    bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) &&
             ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths );
    }

    bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;
    const void *                           pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32           viewportScissor2D  = {};
    uint32_t                               viewportDepthCount = {};
    const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths    = {};
  };
  static_assert( sizeof( CommandBufferInheritanceViewportScissorInfoNV ) ==
                   sizeof( VkCommandBufferInheritanceViewportScissorInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CommandBufferInheritanceViewportScissorInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferInheritanceViewportScissorInfoNV>
  {
    using Type = CommandBufferInheritanceViewportScissorInfoNV;
  };
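  // Example (illustrative, VK_NV_inherited_viewport_scissor): chained into
  // CommandBufferInheritanceInfo::pNext so a secondary command buffer can inherit the viewport and
  // scissor state set in the primary; pViewportDepths supplies only the expected depth ranges.
  //
  //   vk::Viewport depthRange( 0.0f, 0.0f, 0.0f, 0.0f, 0.0f /*minDepth*/, 1.0f /*maxDepth*/ );
  //   vk::CommandBufferInheritanceViewportScissorInfoNV viewportScissorInfo( VK_TRUE, 1, &depthRange );
  //   inheritanceInfo.setPNext( &viewportScissorInfo );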

  struct ConditionalRenderingBeginInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(
      VULKAN_HPP_NAMESPACE::Buffer                       buffer_ = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                   offset_ = {},
      VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_  = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
      , offset( offset_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT &
                            operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
      return *this;
    }

    ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    ConditionalRenderingBeginInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( this );
    }

    operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default;
#else
    bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) &&
             ( flags == rhs.flags );
    }

    bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType  = StructureType::eConditionalRenderingBeginInfoEXT;
    const void *                                       pNext  = {};
    VULKAN_HPP_NAMESPACE::Buffer                       buffer = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                   offset = {};
    VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags  = {};
  };
  static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
  {
    using Type = ConditionalRenderingBeginInfoEXT;
  };
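  // Example (illustrative, VK_EXT_conditional_rendering): "buffer" is a placeholder for a buffer
  // created with the conditional-rendering usage bit; the 32-bit value at "offset" (a multiple of
  // 4) determines whether the bracketed commands execute.
  //
  //   vk::ConditionalRenderingBeginInfoEXT conditionalBegin( buffer, 0 /*offset*/ );
  //   commandBuffer.beginConditionalRenderingEXT( conditionalBegin );
  //   // ... conditionally executed commands ...
  //   commandBuffer.endConditionalRenderingEXT();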

  struct DebugUtilsLabelEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char *                 pLabelName_ = {},
                                                std::array<float, 4> const & color_      = {} ) VULKAN_HPP_NOEXCEPT
      : pLabelName( pLabelName_ )
      , color( color_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT &
                            operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
      return *this;
    }

    DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT
    {
      pLabelName = pLabelName_;
      return *this;
    }

    DebugUtilsLabelEXT & setColor( std::array<float, 4> color_ ) VULKAN_HPP_NOEXCEPT
    {
      color = color_;
      return *this;
    }

    operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsLabelEXT *>( this );
    }

    operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsLabelEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugUtilsLabelEXT const & ) const = default;
#else
    bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pLabelName == rhs.pLabelName ) &&
             ( color == rhs.color );
    }

    bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType      = StructureType::eDebugUtilsLabelEXT;
    const void *                                   pNext      = {};
    const char *                                   pLabelName = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color      = {};
  };
  static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
  {
    using Type = DebugUtilsLabelEXT;
  };
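  // Example (illustrative, VK_EXT_debug_utils): labels group command buffer work for debugging and
  // capture tools; "commandBuffer" is a placeholder and the color is an optional RGBA hint.
  //
  //   vk::DebugUtilsLabelEXT label( "shadow pass", { { 1.0f, 0.5f, 0.0f, 1.0f } } );
  //   commandBuffer.beginDebugUtilsLabelEXT( label );
  //   // ... labeled commands ...
  //   commandBuffer.endDebugUtilsLabelEXT();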

  class QueryPool
  {
  public:
    using CType = VkQueryPool;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;

  public:
    VULKAN_HPP_CONSTEXPR         QueryPool() = default;
    VULKAN_HPP_CONSTEXPR         QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT : m_queryPool( queryPool ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    QueryPool & operator=( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
    {
      m_queryPool = queryPool;
      return *this;
    }
#endif

    QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_queryPool = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueryPool const & ) const = default;
#else
    bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_queryPool == rhs.m_queryPool;
    }

    bool operator!=( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_queryPool != rhs.m_queryPool;
    }

    bool operator<( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_queryPool < rhs.m_queryPool;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
    {
      return m_queryPool;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_queryPool != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_queryPool == VK_NULL_HANDLE;
    }

  private:
    VkQueryPool m_queryPool = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eQueryPool>
  {
    using type = VULKAN_HPP_NAMESPACE::QueryPool;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
  {
    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
  {
    using Type = VULKAN_HPP_NAMESPACE::QueryPool;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
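
  // A minimal usage sketch: the wrapper is a nullable, strongly typed stand-in for
  // VkQueryPool. Default construction yields the null handle, and the comparison
  // operators above also make it usable as a key in ordered containers.
  //
  //   VULKAN_HPP_NAMESPACE::QueryPool pool;   // equivalent to VK_NULL_HANDLE
  //   if ( !pool )
  //   {
  //     // not yet created, e.g. by Device::createQueryPool
  //   }
  //   pool = nullptr;                         // reset to the null handle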

  struct RenderPassBeginInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass         renderPass_      = {},
                           VULKAN_HPP_NAMESPACE::Framebuffer        framebuffer_     = {},
                           VULKAN_HPP_NAMESPACE::Rect2D             renderArea_      = {},
                           uint32_t                                 clearValueCount_ = {},
                           const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_    = {} ) VULKAN_HPP_NOEXCEPT
      : renderPass( renderPass_ )
      , framebuffer( framebuffer_ )
      , renderArea( renderArea_ )
      , clearValueCount( clearValueCount_ )
      , pClearValues( pClearValues_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassBeginInfo(
      VULKAN_HPP_NAMESPACE::RenderPass                                                              renderPass_,
      VULKAN_HPP_NAMESPACE::Framebuffer                                                             framebuffer_,
      VULKAN_HPP_NAMESPACE::Rect2D                                                                  renderArea_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
      : renderPass( renderPass_ )
      , framebuffer( framebuffer_ )
      , renderArea( renderArea_ )
      , clearValueCount( static_cast<uint32_t>( clearValues_.size() ) )
      , pClearValues( clearValues_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo &
                            operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
      return *this;
    }

    RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      framebuffer = framebuffer_;
      return *this;
    }

    RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
    {
      renderArea = renderArea_;
      return *this;
    }

    RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      clearValueCount = clearValueCount_;
      return *this;
    }

    RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pClearValues = pClearValues_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassBeginInfo & setClearValues(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
      VULKAN_HPP_NOEXCEPT
    {
      clearValueCount = static_cast<uint32_t>( clearValues_.size() );
      pClearValues    = clearValues_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassBeginInfo *>( this );
    }

    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassBeginInfo const & ) const = default;
#else
    bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) &&
             ( framebuffer == rhs.framebuffer ) && ( renderArea == rhs.renderArea ) &&
             ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues );
    }

    bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType           = StructureType::eRenderPassBeginInfo;
    const void *                             pNext           = {};
    VULKAN_HPP_NAMESPACE::RenderPass         renderPass      = {};
    VULKAN_HPP_NAMESPACE::Framebuffer        framebuffer     = {};
    VULKAN_HPP_NAMESPACE::Rect2D             renderArea      = {};
    uint32_t                                 clearValueCount = {};
    const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues    = {};
  };
  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
  {
    using Type = RenderPassBeginInfo;
  };
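
  // A minimal sketch of the fluent setter interface (illustrative only; renderPass,
  // framebuffer and extent are hypothetical objects created elsewhere). In enhanced
  // mode, setClearValues() fills clearValueCount and pClearValues from an
  // ArrayProxyNoTemporaries, so the clear values must be named lvalues, not temporaries.
  //
  //   VULKAN_HPP_NAMESPACE::ClearValue clearValue;
  //   clearValue.color = VULKAN_HPP_NAMESPACE::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } );
  //   auto beginInfo = VULKAN_HPP_NAMESPACE::RenderPassBeginInfo()
  //                      .setRenderPass( renderPass )
  //                      .setFramebuffer( framebuffer )
  //                      .setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D( {}, extent ) )
  //                      .setClearValues( clearValue );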

  struct SubpassBeginInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ =
                                             VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT
      : contents( contents_ )
    {}

    VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
      return *this;
    }

    SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
    {
      contents = contents_;
      return *this;
    }

    operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassBeginInfo *>( this );
    }

    operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassBeginInfo const & ) const = default;
#else
    bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents );
    }

    bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType    = StructureType::eSubpassBeginInfo;
    const void *                          pNext    = {};
    VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
  };
  static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSubpassBeginInfo>
  {
    using Type = SubpassBeginInfo;
  };
  using SubpassBeginInfoKHR = SubpassBeginInfo;
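
  // Illustrative sketch: SubpassBeginInfo only selects how the contents of the next
  // subpass are provided, defaulting to SubpassContents::eInline as initialized above.
  //
  //   auto subpassBegin = VULKAN_HPP_NAMESPACE::SubpassBeginInfo().setContents(
  //     VULKAN_HPP_NAMESPACE::SubpassContents::eSecondaryCommandBuffers );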

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  class VideoSessionKHR
  {
  public:
    using CType = VkVideoSessionKHR;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

  public:
    VULKAN_HPP_CONSTEXPR         VideoSessionKHR() = default;
    VULKAN_HPP_CONSTEXPR         VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT
      : m_videoSessionKHR( videoSessionKHR )
    {}

#  if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    VideoSessionKHR & operator=( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT
    {
      m_videoSessionKHR = videoSessionKHR;
      return *this;
    }
#  endif

    VideoSessionKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_videoSessionKHR = {};
      return *this;
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoSessionKHR const & ) const = default;
#  else
    bool operator==( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionKHR == rhs.m_videoSessionKHR;
    }

    bool operator!=( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionKHR != rhs.m_videoSessionKHR;
    }

    bool operator<( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionKHR < rhs.m_videoSessionKHR;
    }
#  endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionKHR;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionKHR != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionKHR == VK_NULL_HANDLE;
    }

  private:
    VkVideoSessionKHR m_videoSessionKHR = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eVideoSessionKHR>
  {
    using type = VULKAN_HPP_NAMESPACE::VideoSessionKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  class VideoSessionParametersKHR
  {
  public:
    using CType = VkVideoSessionParametersKHR;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

  public:
    VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default;
    VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT
      VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT
      : m_videoSessionParametersKHR( videoSessionParametersKHR )
    {}

#  if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    VideoSessionParametersKHR & operator=( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT
    {
      m_videoSessionParametersKHR = videoSessionParametersKHR;
      return *this;
    }
#  endif

    VideoSessionParametersKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_videoSessionParametersKHR = {};
      return *this;
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoSessionParametersKHR const & ) const = default;
#  else
    bool operator==( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionParametersKHR == rhs.m_videoSessionParametersKHR;
    }

    bool operator!=( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionParametersKHR != rhs.m_videoSessionParametersKHR;
    }

    bool operator<( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionParametersKHR < rhs.m_videoSessionParametersKHR;
    }
#  endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionParametersKHR;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionParametersKHR != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_videoSessionParametersKHR == VK_NULL_HANDLE;
    }

  private:
    VkVideoSessionParametersKHR m_videoSessionParametersKHR = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eVideoSessionParametersKHR>
  {
    using type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  class ImageView
  {
  public:
    using CType = VkImageView;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;

  public:
    VULKAN_HPP_CONSTEXPR         ImageView() = default;
    VULKAN_HPP_CONSTEXPR         ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT : m_imageView( imageView ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    ImageView & operator=( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
    {
      m_imageView = imageView;
      return *this;
    }
#endif

    ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_imageView = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageView const & ) const = default;
#else
    bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_imageView == rhs.m_imageView;
    }

    bool operator!=( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_imageView != rhs.m_imageView;
    }

    bool operator<( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_imageView < rhs.m_imageView;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
    {
      return m_imageView;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_imageView != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_imageView == VK_NULL_HANDLE;
    }

  private:
    VkImageView m_imageView = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eImageView>
  {
    using type = VULKAN_HPP_NAMESPACE::ImageView;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
  {
    using Type = VULKAN_HPP_NAMESPACE::ImageView;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
  {
    using Type = VULKAN_HPP_NAMESPACE::ImageView;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ImageView>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
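
  // Illustrative sketch: the handle wrapper converts back to the raw C handle, which is
  // useful when mixing with plain C Vulkan calls. With VULKAN_HPP_TYPESAFE_CONVERSION
  // defined the conversion is implicit; otherwise an explicit cast is required.
  // Given an ImageView 'view' created elsewhere (hypothetical):
  //
  //   VkImageView raw = static_cast<VkImageView>( view );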

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoPictureResourceKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoPictureResourceKHR( VULKAN_HPP_NAMESPACE::Offset2D  codedOffset_      = {},
                               VULKAN_HPP_NAMESPACE::Extent2D  codedExtent_      = {},
                               uint32_t                        baseArrayLayer_   = {},
                               VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {} ) VULKAN_HPP_NOEXCEPT
      : codedOffset( codedOffset_ )
      , codedExtent( codedExtent_ )
      , baseArrayLayer( baseArrayLayer_ )
      , imageViewBinding( imageViewBinding_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoPictureResourceKHR( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoPictureResourceKHR( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoPictureResourceKHR( *reinterpret_cast<VideoPictureResourceKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceKHR &
                            operator=( VideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoPictureResourceKHR & operator=( VkVideoPictureResourceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const *>( &rhs );
      return *this;
    }

    VideoPictureResourceKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoPictureResourceKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      codedOffset = codedOffset_;
      return *this;
    }

    VideoPictureResourceKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      codedExtent = codedExtent_;
      return *this;
    }

    VideoPictureResourceKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      baseArrayLayer = baseArrayLayer_;
      return *this;
    }

    VideoPictureResourceKHR &
      setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewBinding = imageViewBinding_;
      return *this;
    }

    operator VkVideoPictureResourceKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoPictureResourceKHR *>( this );
    }

    operator VkVideoPictureResourceKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoPictureResourceKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoPictureResourceKHR const & ) const = default;
#  else
    bool operator==( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) &&
             ( codedExtent == rhs.codedExtent ) && ( baseArrayLayer == rhs.baseArrayLayer ) &&
             ( imageViewBinding == rhs.imageViewBinding );
    }

    bool operator!=( VideoPictureResourceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::eVideoPictureResourceKHR;
    const void *                        pNext            = {};
    VULKAN_HPP_NAMESPACE::Offset2D      codedOffset      = {};
    VULKAN_HPP_NAMESPACE::Extent2D      codedExtent      = {};
    uint32_t                            baseArrayLayer   = {};
    VULKAN_HPP_NAMESPACE::ImageView     imageViewBinding = {};
  };
  static_assert( sizeof( VideoPictureResourceKHR ) == sizeof( VkVideoPictureResourceKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoPictureResourceKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoPictureResourceKHR>
  {
    using Type = VideoPictureResourceKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoReferenceSlotKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR(
      int8_t                                                slotIndex_        = {},
      const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ = {} ) VULKAN_HPP_NOEXCEPT
      : slotIndex( slotIndex_ )
      , pPictureResource( pPictureResource_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoReferenceSlotKHR( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoReferenceSlotKHR( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoReferenceSlotKHR( *reinterpret_cast<VideoReferenceSlotKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotKHR &
                            operator=( VideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoReferenceSlotKHR & operator=( VkVideoReferenceSlotKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR const *>( &rhs );
      return *this;
    }

    VideoReferenceSlotKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoReferenceSlotKHR & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      slotIndex = slotIndex_;
      return *this;
    }

    VideoReferenceSlotKHR &
      setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT
    {
      pPictureResource = pPictureResource_;
      return *this;
    }

    operator VkVideoReferenceSlotKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoReferenceSlotKHR *>( this );
    }

    operator VkVideoReferenceSlotKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoReferenceSlotKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoReferenceSlotKHR const & ) const = default;
#  else
    bool operator==( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) &&
             ( pPictureResource == rhs.pPictureResource );
    }

    bool operator!=( VideoReferenceSlotKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType            = StructureType::eVideoReferenceSlotKHR;
    const void *                                          pNext            = {};
    int8_t                                                slotIndex        = {};
    const VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR * pPictureResource = {};
  };
  static_assert( sizeof( VideoReferenceSlotKHR ) == sizeof( VkVideoReferenceSlotKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoReferenceSlotKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoReferenceSlotKHR>
  {
    using Type = VideoReferenceSlotKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoBeginCodingInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR(
      VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR         flags_                  = {},
      VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_     = {},
      VULKAN_HPP_NAMESPACE::VideoSessionKHR                  videoSession_           = {},
      VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR        videoSessionParameters_ = {},
      uint32_t                                               referenceSlotCount_     = {},
      const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *    pReferenceSlots_        = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , codecQualityPreset( codecQualityPreset_ )
      , videoSession( videoSession_ )
      , videoSessionParameters( videoSessionParameters_ )
      , referenceSlotCount( referenceSlotCount_ )
      , pReferenceSlots( pReferenceSlots_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoBeginCodingInfoKHR( *reinterpret_cast<VideoBeginCodingInfoKHR const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoBeginCodingInfoKHR(
      VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR         flags_,
      VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_,
      VULKAN_HPP_NAMESPACE::VideoSessionKHR                  videoSession_,
      VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR        videoSessionParameters_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
        referenceSlots_ )
      : flags( flags_ )
      , codecQualityPreset( codecQualityPreset_ )
      , videoSession( videoSession_ )
      , videoSessionParameters( videoSessionParameters_ )
      , referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
      , pReferenceSlots( referenceSlots_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR &
                            operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const *>( &rhs );
      return *this;
    }

    VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VideoBeginCodingInfoKHR & setCodecQualityPreset(
      VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset_ ) VULKAN_HPP_NOEXCEPT
    {
      codecQualityPreset = codecQualityPreset_;
      return *this;
    }

    VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
    {
      videoSession = videoSession_;
      return *this;
    }

    VideoBeginCodingInfoKHR & setVideoSessionParameters(
      VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT
    {
      videoSessionParameters = videoSessionParameters_;
      return *this;
    }

    VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = referenceSlotCount_;
      return *this;
    }

    VideoBeginCodingInfoKHR &
      setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceSlots = pReferenceSlots_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoBeginCodingInfoKHR & setReferenceSlots(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
        referenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
      pReferenceSlots    = referenceSlots_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( this );
    }

    operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoBeginCodingInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default;
#  else
    bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( codecQualityPreset == rhs.codecQualityPreset ) && ( videoSession == rhs.videoSession ) &&
             ( videoSessionParameters == rhs.videoSessionParameters ) &&
             ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots );
    }

    bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType              = StructureType::eVideoBeginCodingInfoKHR;
    const void *                                           pNext              = {};
    VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR         flags              = {};
    VULKAN_HPP_NAMESPACE::VideoCodingQualityPresetFlagsKHR codecQualityPreset = {};
    VULKAN_HPP_NAMESPACE::VideoSessionKHR                  videoSession       = {};
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR        videoSessionParameters = {};
    uint32_t                                               referenceSlotCount     = {};
    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR *    pReferenceSlots        = {};
  };
  static_assert( sizeof( VideoBeginCodingInfoKHR ) == sizeof( VkVideoBeginCodingInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoBeginCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoBeginCodingInfoKHR>
  {
    using Type = VideoBeginCodingInfoKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
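
  // A minimal sketch (beta video extension, illustrative only): in enhanced mode the
  // setReferenceSlots() overload fills referenceSlotCount and pReferenceSlots from a
  // named container in one call; 'session' and 'sessionParameters' are hypothetical
  // objects created elsewhere.
  //
  //   std::vector<VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> slots;  // populated elsewhere
  //   auto beginInfo = VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR()
  //                      .setVideoSession( session )
  //                      .setVideoSessionParameters( sessionParameters )
  //                      .setReferenceSlots( slots );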

  class PipelineLayout
  {
  public:
    using CType = VkPipelineLayout;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;

  public:
    VULKAN_HPP_CONSTEXPR         PipelineLayout() = default;
    VULKAN_HPP_CONSTEXPR         PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
      : m_pipelineLayout( pipelineLayout )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
    {
      m_pipelineLayout = pipelineLayout;
      return *this;
    }
#endif

    PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_pipelineLayout = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineLayout const & ) const = default;
#else
    bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineLayout == rhs.m_pipelineLayout;
    }

    bool operator!=( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineLayout != rhs.m_pipelineLayout;
    }

    bool operator<( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineLayout < rhs.m_pipelineLayout;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineLayout;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineLayout != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineLayout == VK_NULL_HANDLE;
    }

  private:
    VkPipelineLayout m_pipelineLayout = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePipelineLayout>
  {
    using type = VULKAN_HPP_NAMESPACE::PipelineLayout;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout>
  {
    using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout>
  {
    using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineLayout>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  class DescriptorSet
  {
  public:
    using CType = VkDescriptorSet;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;

  public:
    VULKAN_HPP_CONSTEXPR         DescriptorSet() = default;
    VULKAN_HPP_CONSTEXPR         DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
      : m_descriptorSet( descriptorSet )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorSet = descriptorSet;
      return *this;
    }
#endif

    DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorSet = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSet const & ) const = default;
#else
    bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSet == rhs.m_descriptorSet;
    }

    bool operator!=( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSet != rhs.m_descriptorSet;
    }

    bool operator<( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSet < rhs.m_descriptorSet;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSet;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSet != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSet == VK_NULL_HANDLE;
    }

  private:
    VkDescriptorSet m_descriptorSet = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDescriptorSet>
  {
    using type = VULKAN_HPP_NAMESPACE::DescriptorSet;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
  {
    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
  {
    using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSet>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  class Pipeline
  {
  public:
    using CType = VkPipeline;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;

  public:
    VULKAN_HPP_CONSTEXPR         Pipeline() = default;
    VULKAN_HPP_CONSTEXPR         Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT : m_pipeline( pipeline ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Pipeline & operator=( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
    {
      m_pipeline = pipeline;
      return *this;
    }
#endif

    Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_pipeline = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Pipeline const & ) const = default;
#else
    bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_pipeline == rhs.m_pipeline;
    }

    bool operator!=( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_pipeline != rhs.m_pipeline;
    }

    bool operator<( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_pipeline < rhs.m_pipeline;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
    {
      return m_pipeline;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_pipeline != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_pipeline == VK_NULL_HANDLE;
    }

  private:
    VkPipeline m_pipeline = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePipeline>
  {
    using type = VULKAN_HPP_NAMESPACE::Pipeline;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
  {
    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
  {
    using Type = VULKAN_HPP_NAMESPACE::Pipeline;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Pipeline>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct ImageBlit
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers          srcSubresource_ = {},
                 std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_     = {},
                 VULKAN_HPP_NAMESPACE::ImageSubresourceLayers          dstSubresource_ = {},
                 std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_     = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource( srcSubresource_ )
      , srcOffsets( srcOffsets_ )
      , dstSubresource( dstSubresource_ )
      , dstOffsets( dstOffsets_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
      return *this;
    }

    ImageBlit &
      setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffsets = srcOffsets_;
      return *this;
    }

    ImageBlit &
      setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffsets = dstOffsets_;
      return *this;
    }

    operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageBlit *>( this );
    }

    operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageBlit *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageBlit const & ) const = default;
#else
    bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) &&
             ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets );
    }

    bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers                            srcSubresource = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets     = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers                            dstSubresource = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets     = {};
  };
  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
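
  // Illustrative sketch: srcOffsets / dstOffsets are exposed as two-element
  // ArrayWrapper1D members, and the setters accept a std::array of Offset3D, so a blit
  // region can be written as follows (the width/height values are placeholders):
  //
  //   std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcRegion = { { { 0, 0, 0 }, { srcWidth, srcHeight, 1 } } };
  //   std::array<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstRegion = { { { 0, 0, 0 }, { dstWidth, dstHeight, 1 } } };
  //   auto blit = VULKAN_HPP_NAMESPACE::ImageBlit()
  //                 .setSrcSubresource( { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //                 .setSrcOffsets( srcRegion )
  //                 .setDstSubresource( { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //                 .setDstOffsets( dstRegion );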

  struct ImageSubresourceRange
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_     = {},
                                                uint32_t                               baseMipLevel_   = {},
                                                uint32_t                               levelCount_     = {},
                                                uint32_t                               baseArrayLayer_ = {},
                                                uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask( aspectMask_ )
      , baseMipLevel( baseMipLevel_ )
      , levelCount( levelCount_ )
      , baseArrayLayer( baseArrayLayer_ )
      , layerCount( layerCount_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange &
                            operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
      return *this;
    }

    ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      baseMipLevel = baseMipLevel_;
      return *this;
    }

    ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
    {
      levelCount = levelCount_;
      return *this;
    }

    ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      baseArrayLayer = baseArrayLayer_;
      return *this;
    }

    ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }

    operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSubresourceRange *>( this );
    }

    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSubresourceRange *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageSubresourceRange const & ) const = default;
#else
    bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) &&
             ( levelCount == rhs.levelCount ) && ( baseArrayLayer == rhs.baseArrayLayer ) &&
             ( layerCount == rhs.layerCount );
    }

    bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask     = {};
    uint32_t                               baseMipLevel   = {};
    uint32_t                               levelCount     = {};
    uint32_t                               baseArrayLayer = {};
    uint32_t                               layerCount     = {};
  };
  static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
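
  // Illustrative sketch: a range covering every mip level and array layer of a colour
  // image; VK_REMAINING_MIP_LEVELS and VK_REMAINING_ARRAY_LAYERS come from the C header.
  //
  //   VULKAN_HPP_NAMESPACE::ImageSubresourceRange fullRange( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor,
  //                                                          0, VK_REMAINING_MIP_LEVELS,
  //                                                          0, VK_REMAINING_ARRAY_LAYERS );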

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoCodingControlInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoCodingControlInfoKHR( *reinterpret_cast<VideoCodingControlInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR &
                            operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const *>( &rhs );
      return *this;
    }

    VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoCodingControlInfoKHR *>( this );
    }

    operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoCodingControlInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoCodingControlInfoKHR const & ) const = default;
#  else
    bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
    }

    bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType = StructureType::eVideoCodingControlInfoKHR;
    const void *                                     pNext = {};
    VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {};
  };
  static_assert( sizeof( VideoCodingControlInfoKHR ) == sizeof( VkVideoCodingControlInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoCodingControlInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoCodingControlInfoKHR>
  {
    using Type = VideoCodingControlInfoKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  struct CopyAccelerationStructureInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR         src_ = {},
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR         dst_ = {},
      VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ =
        VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone ) VULKAN_HPP_NOEXCEPT
      : src( src_ )
      , dst( dst_ )
      , mode( mode_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyAccelerationStructureInfoKHR( *reinterpret_cast<CopyAccelerationStructureInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR &
                            operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const *>( &rhs );
      return *this;
    }

    CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
    {
      src = src_;
      return *this;
    }

    CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
    {
      dst = dst_;
      return *this;
    }

    CopyAccelerationStructureInfoKHR &
      setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( this );
    }

    operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default;
#else
    bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) &&
             ( mode == rhs.mode );
    }

    bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::eCopyAccelerationStructureInfoKHR;
    const void *                                           pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR         src   = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR         dst   = {};
    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode =
      VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
  };
  static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CopyAccelerationStructureInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
  {
    using Type = CopyAccelerationStructureInfoKHR;
  };
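
  // Illustrative sketch: mode defaults to CopyAccelerationStructureModeKHR::eClone, so a
  // compacting copy only has to override mode ('src' and 'dst' are hypothetical
  // acceleration structures created elsewhere).
  //
  //   auto copyInfo = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR()
  //                     .setSrc( src )
  //                     .setDst( dst )
  //                     .setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eCompact );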

  struct CopyAccelerationStructureToMemoryInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eCopyAccelerationStructureToMemoryInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    CopyAccelerationStructureToMemoryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR         src_ = {},
                                              VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR           dst_ = {},
                                              VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ =
                                                VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone )
      VULKAN_HPP_NOEXCEPT
      : src( src_ )
      , dst( dst_ )
      , mode( mode_ )
    {}

    CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : CopyAccelerationStructureToMemoryInfoKHR(
          *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    CopyAccelerationStructureToMemoryInfoKHR &
      operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyAccelerationStructureToMemoryInfoKHR &
      operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
      return *this;
    }

    CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CopyAccelerationStructureToMemoryInfoKHR &
      setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
    {
      src = src_;
      return *this;
    }

    CopyAccelerationStructureToMemoryInfoKHR &
      setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
    {
      dst = dst_;
      return *this;
    }

    CopyAccelerationStructureToMemoryInfoKHR &
      setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
    }

    operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR *>( this );
    }
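
    // Note: unlike most struct wrappers, no operator== / operator<=> is generated for this
    // struct, because the dst member is a DeviceOrHostAddressKHR union and cannot be compared
    // member-wise.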

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
    const void *                                   pNext = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src   = {};
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR   dst   = {};
    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode =
      VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
  };
  static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) ==
                   sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CopyAccelerationStructureToMemoryInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyAccelerationStructureToMemoryInfoKHR>
  {
    using Type = CopyAccelerationStructureToMemoryInfoKHR;
  };

  struct CopyBufferInfo2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      CopyBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer                 srcBuffer_   = {},
                          VULKAN_HPP_NAMESPACE::Buffer                 dstBuffer_   = {},
                          uint32_t                                     regionCount_ = {},
                          const VULKAN_HPP_NAMESPACE::BufferCopy2KHR * pRegions_    = {} ) VULKAN_HPP_NOEXCEPT
      : srcBuffer( srcBuffer_ )
      , dstBuffer( dstBuffer_ )
      , regionCount( regionCount_ )
      , pRegions( pRegions_ )
    {}

    VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyBufferInfo2KHR( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyBufferInfo2KHR( *reinterpret_cast<CopyBufferInfo2KHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CopyBufferInfo2KHR(
      VULKAN_HPP_NAMESPACE::Buffer                                                                      srcBuffer_,
      VULKAN_HPP_NAMESPACE::Buffer                                                                      dstBuffer_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2KHR> const & regions_ )
      : srcBuffer( srcBuffer_ )
      , dstBuffer( dstBuffer_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2KHR &
                            operator=( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyBufferInfo2KHR & operator=( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR const *>( &rhs );
      return *this;
    }

    CopyBufferInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CopyBufferInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBuffer = srcBuffer_;
      return *this;
    }

    CopyBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBuffer = dstBuffer_;
      return *this;
    }

    CopyBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    CopyBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CopyBufferInfo2KHR & setRegions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2KHR> const & regions_ )
      VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkCopyBufferInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyBufferInfo2KHR *>( this );
    }

    operator VkCopyBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyBufferInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyBufferInfo2KHR const & ) const = default;
#else
    bool operator==( CopyBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) &&
             ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
    }

    bool operator!=( CopyBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType       = StructureType::eCopyBufferInfo2KHR;
    const void *                                 pNext       = {};
    VULKAN_HPP_NAMESPACE::Buffer                 srcBuffer   = {};
    VULKAN_HPP_NAMESPACE::Buffer                 dstBuffer   = {};
    uint32_t                                     regionCount = {};
    const VULKAN_HPP_NAMESPACE::BufferCopy2KHR * pRegions    = {};
  };
  static_assert( sizeof( CopyBufferInfo2KHR ) == sizeof( VkCopyBufferInfo2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CopyBufferInfo2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyBufferInfo2KHR>
  {
    using Type = CopyBufferInfo2KHR;
  };
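
  // Illustrative usage sketch (editorial addition, not generated from the registry): the wrapper
  // supports fluent setters, and in enhanced mode setRegions() derives regionCount and pRegions
  // from an ArrayProxy in a single call. The buffer handles below are assumed to come from the
  // application; the names are hypothetical.
  //
  //   vk::BufferCopy2KHR region( /*srcOffset=*/0, /*dstOffset=*/0, /*size=*/1024 );
  //   auto copyInfo = vk::CopyBufferInfo2KHR{}
  //                     .setSrcBuffer( stagingBuffer )
  //                     .setDstBuffer( deviceLocalBuffer )
  //                     .setRegions( region );     // enhanced mode only
  //   commandBuffer.copyBuffer2KHR( copyInfo );    // requires VK_KHR_copy_commands2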

  struct CopyBufferToImageInfo2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR(
      VULKAN_HPP_NAMESPACE::Buffer                      srcBuffer_      = {},
      VULKAN_HPP_NAMESPACE::Image                       dstImage_       = {},
      VULKAN_HPP_NAMESPACE::ImageLayout                 dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      uint32_t                                          regionCount_    = {},
      const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_       = {} ) VULKAN_HPP_NOEXCEPT
      : srcBuffer( srcBuffer_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( regionCount_ )
      , pRegions( pRegions_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CopyBufferToImageInfo2KHR( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyBufferToImageInfo2KHR( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyBufferToImageInfo2KHR( *reinterpret_cast<CopyBufferToImageInfo2KHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CopyBufferToImageInfo2KHR(
      VULKAN_HPP_NAMESPACE::Buffer      srcBuffer_,
      VULKAN_HPP_NAMESPACE::Image       dstImage_,
      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
      : srcBuffer( srcBuffer_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2KHR &
                            operator=( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyBufferToImageInfo2KHR & operator=( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR const *>( &rhs );
      return *this;
    }

    CopyBufferToImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CopyBufferToImageInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBuffer = srcBuffer_;
      return *this;
    }

    CopyBufferToImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    CopyBufferToImageInfo2KHR &
      setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    CopyBufferToImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    CopyBufferToImageInfo2KHR &
      setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CopyBufferToImageInfo2KHR & setRegions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
      VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkCopyBufferToImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( this );
    }

    operator VkCopyBufferToImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyBufferToImageInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyBufferToImageInfo2KHR const & ) const = default;
#else
    bool operator==( CopyBufferToImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) &&
             ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) &&
             ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
    }

    bool operator!=( CopyBufferToImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType          = StructureType::eCopyBufferToImageInfo2KHR;
    const void *                                      pNext          = {};
    VULKAN_HPP_NAMESPACE::Buffer                      srcBuffer      = {};
    VULKAN_HPP_NAMESPACE::Image                       dstImage       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout                 dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    uint32_t                                          regionCount    = {};
    const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions       = {};
  };
  static_assert( sizeof( CopyBufferToImageInfo2KHR ) == sizeof( VkCopyBufferToImageInfo2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CopyBufferToImageInfo2KHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2KHR>
  {
    using Type = CopyBufferToImageInfo2KHR;
  };

  struct ImageCopy
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
                                    VULKAN_HPP_NAMESPACE::Offset3D               srcOffset_      = {},
                                    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
                                    VULKAN_HPP_NAMESPACE::Offset3D               dstOffset_      = {},
                                    VULKAN_HPP_NAMESPACE::Extent3D               extent_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource( srcSubresource_ )
      , srcOffset( srcOffset_ )
      , dstSubresource( dstSubresource_ )
      , dstOffset( dstOffset_ )
      , extent( extent_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
      return *this;
    }

    ImageCopy &
      setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    ImageCopy &
      setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCopy *>( this );
    }

    operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCopy *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCopy const & ) const = default;
#else
    bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
             ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
    }

    bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D               srcOffset      = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D               dstOffset      = {};
    VULKAN_HPP_NAMESPACE::Extent3D               extent         = {};
  };
  static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
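
  // Note: ImageCopy is the core, non-extensible variant (no sType / pNext); ImageCopy2KHR below
  // carries the same payload as an extensible structure for use with VK_KHR_copy_commands2.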

  struct ImageCopy2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageCopy2KHR( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
                                        VULKAN_HPP_NAMESPACE::Offset3D               srcOffset_      = {},
                                        VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
                                        VULKAN_HPP_NAMESPACE::Offset3D               dstOffset_      = {},
                                        VULKAN_HPP_NAMESPACE::Extent3D               extent_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource( srcSubresource_ )
      , srcOffset( srcOffset_ )
      , dstSubresource( dstSubresource_ )
      , dstOffset( dstOffset_ )
      , extent( extent_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageCopy2KHR( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageCopy2KHR( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageCopy2KHR( *reinterpret_cast<ImageCopy2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageCopy2KHR & operator=( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageCopy2KHR & operator=( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2KHR const *>( &rhs );
      return *this;
    }

    ImageCopy2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageCopy2KHR &
      setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    ImageCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    ImageCopy2KHR &
      setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    ImageCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    ImageCopy2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    operator VkImageCopy2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCopy2KHR *>( this );
    }

    operator VkImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCopy2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCopy2KHR const & ) const = default;
#else
    bool operator==( ImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) &&
             ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
             ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
    }

    bool operator!=( ImageCopy2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType          = StructureType::eImageCopy2KHR;
    const void *                                 pNext          = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D               srcOffset      = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D               dstOffset      = {};
    VULKAN_HPP_NAMESPACE::Extent3D               extent         = {};
  };
  static_assert( sizeof( ImageCopy2KHR ) == sizeof( VkImageCopy2KHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageCopy2KHR>
  {
    using Type = ImageCopy2KHR;
  };

  struct CopyImageInfo2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR(
      VULKAN_HPP_NAMESPACE::Image                 srcImage_       = {},
      VULKAN_HPP_NAMESPACE::ImageLayout           srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      VULKAN_HPP_NAMESPACE::Image                 dstImage_       = {},
      VULKAN_HPP_NAMESPACE::ImageLayout           dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      uint32_t                                    regionCount_    = {},
      const VULKAN_HPP_NAMESPACE::ImageCopy2KHR * pRegions_       = {} ) VULKAN_HPP_NOEXCEPT
      : srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( regionCount_ )
      , pRegions( pRegions_ )
    {}

    VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyImageInfo2KHR( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyImageInfo2KHR( *reinterpret_cast<CopyImageInfo2KHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CopyImageInfo2KHR(
      VULKAN_HPP_NAMESPACE::Image                                                                      srcImage_,
      VULKAN_HPP_NAMESPACE::ImageLayout                                                                srcImageLayout_,
      VULKAN_HPP_NAMESPACE::Image                                                                      dstImage_,
      VULKAN_HPP_NAMESPACE::ImageLayout                                                                dstImageLayout_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ )
      : srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2KHR &
                            operator=( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyImageInfo2KHR & operator=( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR const *>( &rhs );
      return *this;
    }

    CopyImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CopyImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    CopyImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    CopyImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    CopyImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    CopyImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    CopyImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CopyImageInfo2KHR & setRegions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ )
      VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkCopyImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyImageInfo2KHR *>( this );
    }

    operator VkCopyImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyImageInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyImageInfo2KHR const & ) const = default;
#else
    bool operator==( CopyImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
             ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) &&
             ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) &&
             ( pRegions == rhs.pRegions );
    }

    bool operator!=( CopyImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType          = StructureType::eCopyImageInfo2KHR;
    const void *                                pNext          = {};
    VULKAN_HPP_NAMESPACE::Image                 srcImage       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout           srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::Image                 dstImage       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout           dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    uint32_t                                    regionCount    = {};
    const VULKAN_HPP_NAMESPACE::ImageCopy2KHR * pRegions       = {};
  };
  static_assert( sizeof( CopyImageInfo2KHR ) == sizeof( VkCopyImageInfo2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CopyImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyImageInfo2KHR>
  {
    using Type = CopyImageInfo2KHR;
  };
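
  // Illustrative usage sketch (editorial addition, not generated from the registry): an
  // ImageCopy2KHR region is paired with CopyImageInfo2KHR via the enhanced-mode constructor,
  // which derives regionCount and pRegions from the ArrayProxy. The handles and extent below are
  // assumed to come from the application; the names are hypothetical.
  //
  //   vk::ImageCopy2KHR region;
  //   region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
  //         .setExtent( { width, height, 1 } );
  //   vk::CopyImageInfo2KHR copyInfo( srcImage,
  //                                   vk::ImageLayout::eTransferSrcOptimal,
  //                                   dstImage,
  //                                   vk::ImageLayout::eTransferDstOptimal,
  //                                   region );
  //   commandBuffer.copyImage2KHR( copyInfo );  // requires VK_KHR_copy_commands2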

  struct CopyImageToBufferInfo2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR(
      VULKAN_HPP_NAMESPACE::Image                       srcImage_       = {},
      VULKAN_HPP_NAMESPACE::ImageLayout                 srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      VULKAN_HPP_NAMESPACE::Buffer                      dstBuffer_      = {},
      uint32_t                                          regionCount_    = {},
      const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_       = {} ) VULKAN_HPP_NOEXCEPT
      : srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstBuffer( dstBuffer_ )
      , regionCount( regionCount_ )
      , pRegions( pRegions_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CopyImageToBufferInfo2KHR( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyImageToBufferInfo2KHR( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyImageToBufferInfo2KHR( *reinterpret_cast<CopyImageToBufferInfo2KHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CopyImageToBufferInfo2KHR(
      VULKAN_HPP_NAMESPACE::Image       srcImage_,
      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
      VULKAN_HPP_NAMESPACE::Buffer      dstBuffer_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
      : srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstBuffer( dstBuffer_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2KHR &
                            operator=( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyImageToBufferInfo2KHR & operator=( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR const *>( &rhs );
      return *this;
    }

    CopyImageToBufferInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CopyImageToBufferInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    CopyImageToBufferInfo2KHR &
      setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    CopyImageToBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBuffer = dstBuffer_;
      return *this;
    }

    CopyImageToBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    CopyImageToBufferInfo2KHR &
      setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    CopyImageToBufferInfo2KHR & setRegions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
      VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkCopyImageToBufferInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( this );
    }

    operator VkCopyImageToBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyImageToBufferInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyImageToBufferInfo2KHR const & ) const = default;
#else
    bool operator==( CopyImageToBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
             ( srcImageLayout == rhs.srcImageLayout ) && ( dstBuffer == rhs.dstBuffer ) &&
             ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
    }

    bool operator!=( CopyImageToBufferInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType          = StructureType::eCopyImageToBufferInfo2KHR;
    const void *                                      pNext          = {};
    VULKAN_HPP_NAMESPACE::Image                       srcImage       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout                 srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::Buffer                      dstBuffer      = {};
    uint32_t                                          regionCount    = {};
    const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR * pRegions       = {};
  };
  static_assert( sizeof( CopyImageToBufferInfo2KHR ) == sizeof( VkCopyImageToBufferInfo2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CopyImageToBufferInfo2KHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2KHR>
  {
    using Type = CopyImageToBufferInfo2KHR;
  };

  struct CopyMemoryToAccelerationStructureInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eCopyMemoryToAccelerationStructureInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    CopyMemoryToAccelerationStructureInfoKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR      src_ = {},
                                              VULKAN_HPP_NAMESPACE::AccelerationStructureKHR         dst_ = {},
                                              VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ =
                                                VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone )
      VULKAN_HPP_NOEXCEPT
      : src( src_ )
      , dst( dst_ )
      , mode( mode_ )
    {}

    CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : CopyMemoryToAccelerationStructureInfoKHR(
          *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    CopyMemoryToAccelerationStructureInfoKHR &
      operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyMemoryToAccelerationStructureInfoKHR &
      operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
      return *this;
    }

    CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CopyMemoryToAccelerationStructureInfoKHR &
      setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
    {
      src = src_;
      return *this;
    }

    CopyMemoryToAccelerationStructureInfoKHR &
      setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
    {
      dst = dst_;
      return *this;
    }

    CopyMemoryToAccelerationStructureInfoKHR &
      setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
    }

    operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
    const void *                                      pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src   = {};
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR    dst   = {};
    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode =
      VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
  };
  static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) ==
                   sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CopyMemoryToAccelerationStructureInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
  {
    using Type = CopyMemoryToAccelerationStructureInfoKHR;
  };
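
  // Illustrative usage sketch (editorial addition, not generated from the registry): this struct
  // is typically used with mode eDeserialize to restore a previously serialized acceleration
  // structure from a device address. The address and destination handle below are assumed to be
  // prepared by the application; the names are hypothetical.
  //
  //   auto copyInfo = vk::CopyMemoryToAccelerationStructureInfoKHR{}
  //                     .setSrc( vk::DeviceOrHostAddressConstKHR( serializedDataAddress ) )
  //                     .setDst( restoredAccel )
  //                     .setMode( vk::CopyAccelerationStructureModeKHR::eDeserialize );
  //   commandBuffer.copyMemoryToAccelerationStructureKHR( copyInfo );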

  class CuFunctionNVX
  {
  public:
    using CType = VkCuFunctionNVX;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX;

  public:
    VULKAN_HPP_CONSTEXPR         CuFunctionNVX() = default;
    VULKAN_HPP_CONSTEXPR         CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT
      : m_cuFunctionNVX( cuFunctionNVX )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    CuFunctionNVX & operator=( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT
    {
      m_cuFunctionNVX = cuFunctionNVX;
      return *this;
    }
#endif

    CuFunctionNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_cuFunctionNVX = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CuFunctionNVX const & ) const = default;
#else
    bool operator==( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_cuFunctionNVX == rhs.m_cuFunctionNVX;
    }

    bool operator!=( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_cuFunctionNVX != rhs.m_cuFunctionNVX;
    }

    bool operator<( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_cuFunctionNVX < rhs.m_cuFunctionNVX;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT
    {
      return m_cuFunctionNVX;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_cuFunctionNVX != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_cuFunctionNVX == VK_NULL_HANDLE;
    }

  private:
    VkCuFunctionNVX m_cuFunctionNVX = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eCuFunctionNVX>
  {
    using type = VULKAN_HPP_NAMESPACE::CuFunctionNVX;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX>
  {
    using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX>
  {
    using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
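
  // Note: CuFunctionNVX follows the usual handle-wrapper pattern: a thin value wrapper around
  // VkCuFunctionNVX with an explicit bool conversion for null checks. Illustrative sketch
  // (editorial addition; assumes the handle comes from Device::createCuFunctionNVX of
  // VK_NVX_binary_import):
  //
  //   vk::CuFunctionNVX cuFunction = ...;
  //   if ( !cuFunction )
  //   {
  //     // handle creation failure
  //   }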

  struct CuLaunchInfoNVX
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_       = {},
                                          uint32_t                            gridDimX_       = {},
                                          uint32_t                            gridDimY_       = {},
                                          uint32_t                            gridDimZ_       = {},
                                          uint32_t                            blockDimX_      = {},
                                          uint32_t                            blockDimY_      = {},
                                          uint32_t                            blockDimZ_      = {},
                                          uint32_t                            sharedMemBytes_ = {},
                                          size_t                              paramCount_     = {},
                                          const void * const *                pParams_        = {},
                                          size_t                              extraCount_     = {},
                                          const void * const *                pExtras_        = {} ) VULKAN_HPP_NOEXCEPT
      : function( function_ )
      , gridDimX( gridDimX_ )
      , gridDimY( gridDimY_ )
      , gridDimZ( gridDimZ_ )
      , blockDimX( blockDimX_ )
      , blockDimY( blockDimY_ )
      , blockDimZ( blockDimZ_ )
      , sharedMemBytes( sharedMemBytes_ )
      , paramCount( paramCount_ )
      , pParams( pParams_ )
      , extraCount( extraCount_ )
      , pExtras( pExtras_ )
    {}

    VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : CuLaunchInfoNVX( *reinterpret_cast<CuLaunchInfoNVX const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX const *>( &rhs );
      return *this;
    }

    CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT
    {
      function = function_;
      return *this;
    }

    CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT
    {
      gridDimX = gridDimX_;
      return *this;
    }

    CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT
    {
      gridDimY = gridDimY_;
      return *this;
    }

    CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT
    {
      gridDimZ = gridDimZ_;
      return *this;
    }

    CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT
    {
      blockDimX = blockDimX_;
      return *this;
    }

    CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT
    {
      blockDimY = blockDimY_;
      return *this;
    }

    CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT
    {
      blockDimZ = blockDimZ_;
      return *this;
    }

    CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT
    {
      sharedMemBytes = sharedMemBytes_;
      return *this;
    }

    CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT
    {
      paramCount = paramCount_;
      return *this;
    }

    CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT
    {
      pParams = pParams_;
      return *this;
    }

    CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT
    {
      extraCount = extraCount_;
      return *this;
    }

    CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT
    {
      pExtras = pExtras_;
      return *this;
    }

    operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCuLaunchInfoNVX *>( this );
    }

    operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCuLaunchInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CuLaunchInfoNVX const & ) const = default;
#else
    bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) &&
             ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && ( gridDimZ == rhs.gridDimZ ) &&
             ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) &&
             ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) &&
             ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && ( pExtras == rhs.pExtras );
    }

    bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eCuLaunchInfoNVX;
    const void *                        pNext          = {};
    VULKAN_HPP_NAMESPACE::CuFunctionNVX function       = {};
    uint32_t                            gridDimX       = {};
    uint32_t                            gridDimY       = {};
    uint32_t                            gridDimZ       = {};
    uint32_t                            blockDimX      = {};
    uint32_t                            blockDimY      = {};
    uint32_t                            blockDimZ      = {};
    uint32_t                            sharedMemBytes = {};
    size_t                              paramCount     = {};
    const void * const *                pParams        = {};
    size_t                              extraCount     = {};
    const void * const *                pExtras        = {};
  };
  static_assert( sizeof( CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CuLaunchInfoNVX>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCuLaunchInfoNVX>
  {
    using Type = CuLaunchInfoNVX;
  };
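
  // Illustrative usage sketch (editorial addition, not generated from the registry): the fluent
  // setters mirror a CUDA-style launch configuration. The function handle and kernel parameter
  // pointers below are assumed to be prepared by the application; the names are hypothetical.
  //
  //   const void * params[] = { &inputPtr, &outputPtr, &elementCount };
  //   auto launchInfo = vk::CuLaunchInfoNVX{}
  //                       .setFunction( cuFunction )
  //                       .setGridDimX( ( elementCount + 255 ) / 256 )
  //                       .setGridDimY( 1 )
  //                       .setGridDimZ( 1 )
  //                       .setBlockDimX( 256 )
  //                       .setBlockDimY( 1 )
  //                       .setBlockDimZ( 1 )
  //                       .setParamCount( 3 )
  //                       .setPParams( params );
  //   commandBuffer.cuLaunchKernelNVX( launchInfo );  // VK_NVX_binary_import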

  struct DebugMarkerMarkerInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char *                 pMarkerName_ = {},
                                                      std::array<float, 4> const & color_ = {} ) VULKAN_HPP_NOEXCEPT
      : pMarkerName( pMarkerName_ )
      , color( color_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT &
                            operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
      return *this;
    }

    DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT
    {
      pMarkerName = pMarkerName_;
      return *this;
    }

    DebugMarkerMarkerInfoEXT & setColor( std::array<float, 4> color_ ) VULKAN_HPP_NOEXCEPT
    {
      color = color_;
      return *this;
    }

    operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( this );
    }

    operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugMarkerMarkerInfoEXT const & ) const = default;
#else
    bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pMarkerName == rhs.pMarkerName ) &&
             ( color == rhs.color );
    }

    bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType       = StructureType::eDebugMarkerMarkerInfoEXT;
    const void *                                   pNext       = {};
    const char *                                   pMarkerName = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color       = {};
  };
  static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
  {
    using Type = DebugMarkerMarkerInfoEXT;
  };
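
  // Illustrative usage sketch (editorial addition, not generated from the registry): pMarkerName
  // must point to a null-terminated string that outlives the call, and color is an optional RGBA
  // value. The command buffer below is assumed to come from the application.
  //
  //   vk::DebugMarkerMarkerInfoEXT marker( "Shadow pass", { { 0.2f, 0.2f, 0.2f, 1.0f } } );
  //   commandBuffer.debugMarkerBeginEXT( marker );  // VK_EXT_debug_marker
  //   // ... record the pass ...
  //   commandBuffer.debugMarkerEndEXT();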

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR(
      VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR           flags_               = {},
      VULKAN_HPP_NAMESPACE::Offset2D                      codedOffset_         = {},
      VULKAN_HPP_NAMESPACE::Extent2D                      codedExtent_         = {},
      VULKAN_HPP_NAMESPACE::Buffer                        srcBuffer_           = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                    srcBufferOffset_     = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                    srcBufferRange_      = {},
      VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR       dstPictureResource_  = {},
      const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ = {},
      uint32_t                                            referenceSlotCount_  = {},
      const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_     = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , codedOffset( codedOffset_ )
      , codedExtent( codedExtent_ )
      , srcBuffer( srcBuffer_ )
      , srcBufferOffset( srcBufferOffset_ )
      , srcBufferRange( srcBufferRange_ )
      , dstPictureResource( dstPictureResource_ )
      , pSetupReferenceSlot( pSetupReferenceSlot_ )
      , referenceSlotCount( referenceSlotCount_ )
      , pReferenceSlots( pReferenceSlots_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeInfoKHR( *reinterpret_cast<VideoDecodeInfoKHR const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeInfoKHR(
      VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR           flags_,
      VULKAN_HPP_NAMESPACE::Offset2D                      codedOffset_,
      VULKAN_HPP_NAMESPACE::Extent2D                      codedExtent_,
      VULKAN_HPP_NAMESPACE::Buffer                        srcBuffer_,
      VULKAN_HPP_NAMESPACE::DeviceSize                    srcBufferOffset_,
      VULKAN_HPP_NAMESPACE::DeviceSize                    srcBufferRange_,
      VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR       dstPictureResource_,
      const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
        referenceSlots_ )
      : flags( flags_ )
      , codedOffset( codedOffset_ )
      , codedExtent( codedExtent_ )
      , srcBuffer( srcBuffer_ )
      , srcBufferOffset( srcBufferOffset_ )
      , srcBufferRange( srcBufferRange_ )
      , dstPictureResource( dstPictureResource_ )
      , pSetupReferenceSlot( pSetupReferenceSlot_ )
      , referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
      , pReferenceSlots( referenceSlots_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR &
                            operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const *>( &rhs );
      return *this;
    }

    VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VideoDecodeInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      codedOffset = codedOffset_;
      return *this;
    }

    VideoDecodeInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      codedExtent = codedExtent_;
      return *this;
    }

    VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBuffer = srcBuffer_;
      return *this;
    }

    VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBufferOffset = srcBufferOffset_;
      return *this;
    }

    VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBufferRange = srcBufferRange_;
      return *this;
    }

    VideoDecodeInfoKHR & setDstPictureResource(
      VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstPictureResource = dstPictureResource_;
      return *this;
    }

    VideoDecodeInfoKHR & setPSetupReferenceSlot(
      const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetupReferenceSlot = pSetupReferenceSlot_;
      return *this;
    }

    VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = referenceSlotCount_;
      return *this;
    }

    VideoDecodeInfoKHR &
      setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceSlots = pReferenceSlots_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeInfoKHR & setReferenceSlots(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
        referenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
      pReferenceSlots    = referenceSlots_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeInfoKHR *>( this );
    }

    operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeInfoKHR const & ) const = default;
#  else
    bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) &&
             ( srcBuffer == rhs.srcBuffer ) && ( srcBufferOffset == rhs.srcBufferOffset ) &&
             ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) &&
             ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) &&
             ( pReferenceSlots == rhs.pReferenceSlots );
    }

    bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType               = StructureType::eVideoDecodeInfoKHR;
    const void *                                        pNext               = {};
    VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR           flags               = {};
    VULKAN_HPP_NAMESPACE::Offset2D                      codedOffset         = {};
    VULKAN_HPP_NAMESPACE::Extent2D                      codedExtent         = {};
    VULKAN_HPP_NAMESPACE::Buffer                        srcBuffer           = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                    srcBufferOffset     = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                    srcBufferRange      = {};
    VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR       dstPictureResource  = {};
    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot = {};
    uint32_t                                            referenceSlotCount  = {};
    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots     = {};
  };
  static_assert( sizeof( VideoDecodeInfoKHR ) == sizeof( VkVideoDecodeInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeInfoKHR>
  {
    using Type = VideoDecodeInfoKHR;
  };
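
  // Illustrative usage sketch only, not part of the generated API: VideoDecodeInfoKHR is a
  // layout-compatible wrapper over VkVideoDecodeInfoKHR and is usually filled through its
  // chaining setters. The names `bitstreamBuffer`, `bitstreamSize`, `dstResource` and `cmd`
  // below are hypothetical, assumed-valid objects, and the call assumes the
  // VK_KHR_video_decode_queue extension is enabled.
  //
  //   vk::VideoDecodeInfoKHR decodeInfo = vk::VideoDecodeInfoKHR()
  //                                         .setSrcBuffer( bitstreamBuffer )
  //                                         .setSrcBufferOffset( 0 )
  //                                         .setSrcBufferRange( bitstreamSize )
  //                                         .setCodedExtent( { 1920, 1080 } )
  //                                         .setDstPictureResource( dstResource );
  //   cmd.decodeVideoKHR( decodeInfo );
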
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR(
      VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR           flags_                      = {},
      uint32_t                                            qualityLevel_               = {},
      VULKAN_HPP_NAMESPACE::Extent2D                      codedExtent_                = {},
      VULKAN_HPP_NAMESPACE::Buffer                        dstBitstreamBuffer_         = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                    dstBitstreamBufferOffset_   = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                    dstBitstreamBufferMaxRange_ = {},
      VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR       srcPictureResource_         = {},
      const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_        = {},
      uint32_t                                            referenceSlotCount_         = {},
      const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_            = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , qualityLevel( qualityLevel_ )
      , codedExtent( codedExtent_ )
      , dstBitstreamBuffer( dstBitstreamBuffer_ )
      , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ )
      , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ )
      , srcPictureResource( srcPictureResource_ )
      , pSetupReferenceSlot( pSetupReferenceSlot_ )
      , referenceSlotCount( referenceSlotCount_ )
      , pReferenceSlots( pReferenceSlots_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeInfoKHR( *reinterpret_cast<VideoEncodeInfoKHR const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeInfoKHR(
      VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR           flags_,
      uint32_t                                            qualityLevel_,
      VULKAN_HPP_NAMESPACE::Extent2D                      codedExtent_,
      VULKAN_HPP_NAMESPACE::Buffer                        dstBitstreamBuffer_,
      VULKAN_HPP_NAMESPACE::DeviceSize                    dstBitstreamBufferOffset_,
      VULKAN_HPP_NAMESPACE::DeviceSize                    dstBitstreamBufferMaxRange_,
      VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR       srcPictureResource_,
      const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
        referenceSlots_ )
      : flags( flags_ )
      , qualityLevel( qualityLevel_ )
      , codedExtent( codedExtent_ )
      , dstBitstreamBuffer( dstBitstreamBuffer_ )
      , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ )
      , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ )
      , srcPictureResource( srcPictureResource_ )
      , pSetupReferenceSlot( pSetupReferenceSlot_ )
      , referenceSlotCount( static_cast<uint32_t>( referenceSlots_.size() ) )
      , pReferenceSlots( referenceSlots_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR &
                            operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const *>( &rhs );
      return *this;
    }

    VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VideoEncodeInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      qualityLevel = qualityLevel_;
      return *this;
    }

    VideoEncodeInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      codedExtent = codedExtent_;
      return *this;
    }

    VideoEncodeInfoKHR & setDstBitstreamBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBitstreamBuffer = dstBitstreamBuffer_;
      return *this;
    }

    VideoEncodeInfoKHR &
      setDstBitstreamBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBitstreamBufferOffset = dstBitstreamBufferOffset_;
      return *this;
    }

    VideoEncodeInfoKHR &
      setDstBitstreamBufferMaxRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBitstreamBufferMaxRange = dstBitstreamBufferMaxRange_;
      return *this;
    }

    VideoEncodeInfoKHR & setSrcPictureResource(
      VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcPictureResource = srcPictureResource_;
      return *this;
    }

    VideoEncodeInfoKHR & setPSetupReferenceSlot(
      const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetupReferenceSlot = pSetupReferenceSlot_;
      return *this;
    }

    VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = referenceSlotCount_;
      return *this;
    }

    VideoEncodeInfoKHR &
      setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      pReferenceSlots = pReferenceSlots_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeInfoKHR & setReferenceSlots(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR> const &
        referenceSlots_ ) VULKAN_HPP_NOEXCEPT
    {
      referenceSlotCount = static_cast<uint32_t>( referenceSlots_.size() );
      pReferenceSlots    = referenceSlots_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeInfoKHR *>( this );
    }

    operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeInfoKHR const & ) const = default;
#  else
    bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( qualityLevel == rhs.qualityLevel ) && ( codedExtent == rhs.codedExtent ) &&
             ( dstBitstreamBuffer == rhs.dstBitstreamBuffer ) &&
             ( dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset ) &&
             ( dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange ) &&
             ( srcPictureResource == rhs.srcPictureResource ) && ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) &&
             ( referenceSlotCount == rhs.referenceSlotCount ) && ( pReferenceSlots == rhs.pReferenceSlots );
    }

    bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType                      = StructureType::eVideoEncodeInfoKHR;
    const void *                                        pNext                      = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR           flags                      = {};
    uint32_t                                            qualityLevel               = {};
    VULKAN_HPP_NAMESPACE::Extent2D                      codedExtent                = {};
    VULKAN_HPP_NAMESPACE::Buffer                        dstBitstreamBuffer         = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                    dstBitstreamBufferOffset   = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                    dstBitstreamBufferMaxRange = {};
    VULKAN_HPP_NAMESPACE::VideoPictureResourceKHR       srcPictureResource         = {};
    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pSetupReferenceSlot        = {};
    uint32_t                                            referenceSlotCount         = {};
    const VULKAN_HPP_NAMESPACE::VideoReferenceSlotKHR * pReferenceSlots            = {};
  };
  static_assert( sizeof( VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeInfoKHR>
  {
    using Type = VideoEncodeInfoKHR;
  };
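
  // Illustrative sketch only: VideoEncodeInfoKHR is filled analogously to VideoDecodeInfoKHR
  // and handed to CommandBuffer::encodeVideoKHR inside a video coding scope. The names
  // `bitstreamBuffer`, `bitstreamSize`, `srcResource` and `cmd` are hypothetical placeholders.
  //
  //   vk::VideoEncodeInfoKHR encodeInfo = vk::VideoEncodeInfoKHR()
  //                                         .setQualityLevel( 0 )
  //                                         .setCodedExtent( { 1920, 1080 } )
  //                                         .setDstBitstreamBuffer( bitstreamBuffer )
  //                                         .setDstBitstreamBufferMaxRange( bitstreamSize )
  //                                         .setSrcPictureResource( srcResource );
  //   cmd.encodeVideoKHR( encodeInfo );
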
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  struct SubpassEndInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT {}

    VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
      return *this;
    }

    SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassEndInfo *>( this );
    }

    operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassEndInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassEndInfo const & ) const = default;
#else
    bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
    }

    bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
    const void *                        pNext = {};
  };
  static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSubpassEndInfo>
  {
    using Type = SubpassEndInfo;
  };
  using SubpassEndInfoKHR = SubpassEndInfo;
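
  // Note: SubpassEndInfo carries no members beyond sType and pNext; it exists so that
  // vkCmdEndRenderPass2 / vkCmdNextSubpass2 can be extended through the pNext chain.
  // Minimal illustrative use, assuming a recording command buffer `cmd`:
  //
  //   cmd.endRenderPass2( vk::SubpassEndInfo() );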

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEndCodingInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEndCodingInfoKHR( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEndCodingInfoKHR( *reinterpret_cast<VideoEndCodingInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR &
                            operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR const *>( &rhs );
      return *this;
    }

    VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEndCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEndCodingInfoKHR *>( this );
    }

    operator VkVideoEndCodingInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEndCodingInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEndCodingInfoKHR const & ) const = default;
#  else
    bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
    }

    bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType = StructureType::eVideoEndCodingInfoKHR;
    const void *                                 pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags = {};
  };
  static_assert( sizeof( VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEndCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEndCodingInfoKHR>
  {
    using Type = VideoEndCodingInfoKHR;
  };
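
  // Illustrative sketch only: VideoEndCodingInfoKHR closes a video coding scope started with
  // CommandBuffer::beginVideoCodingKHR. Assuming a recording command buffer `cmd`:
  //
  //   cmd.endVideoCodingKHR( vk::VideoEndCodingInfoKHR() );
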
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  class IndirectCommandsLayoutNV
  {
  public:
    using CType = VkIndirectCommandsLayoutNV;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

  public:
    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default;
    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT
      IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
      : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
    {
      m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
      return *this;
    }
#endif

    IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_indirectCommandsLayoutNV = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsLayoutNV const & ) const = default;
#else
    bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
    }

    bool operator!=( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
    }

    bool operator<( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
    {
      return m_indirectCommandsLayoutNV;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
    }

  private:
    VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eIndirectCommandsLayoutNV>
  {
    using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
  {
    using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
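
  // Note: IndirectCommandsLayoutNV is a non-owning wrapper around the non-dispatchable
  // VkIndirectCommandsLayoutNV handle (VK_NV_device_generated_commands). Like the other handle
  // classes it is comparable, testable via explicit operator bool, and convertible back to the
  // C handle; destroying the underlying object remains the application's responsibility.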

  struct IndirectCommandsStreamNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer     buffer_ = {},
                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
      , offset( offset_ )
    {}

    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV &
                            operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
      return *this;
    }

    IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsStreamNV *>( this );
    }

    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsStreamNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsStreamNV const & ) const = default;
#else
    bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( buffer == rhs.buffer ) && ( offset == rhs.offset );
    }

    bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Buffer     buffer = {};
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  };
  static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
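
  // Illustrative sketch only: IndirectCommandsStreamNV simply pairs a buffer with a byte
  // offset describing one input stream of device-generated commands (`streamBuffer` below is
  // a hypothetical, assumed-valid vk::Buffer).
  //
  //   vk::IndirectCommandsStreamNV stream( streamBuffer, 0 );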

  struct GeneratedCommandsInfoNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
      VULKAN_HPP_NAMESPACE::Pipeline          pipeline_          = {},
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV         indirectCommandsLayout_ = {},
      uint32_t                                               streamCount_            = {},
      const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_               = {},
      uint32_t                                               sequencesCount_         = {},
      VULKAN_HPP_NAMESPACE::Buffer                           preprocessBuffer_       = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                       preprocessOffset_       = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                       preprocessSize_         = {},
      VULKAN_HPP_NAMESPACE::Buffer                           sequencesCountBuffer_   = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                       sequencesCountOffset_   = {},
      VULKAN_HPP_NAMESPACE::Buffer                           sequencesIndexBuffer_   = {},
      VULKAN_HPP_NAMESPACE::DeviceSize                       sequencesIndexOffset_   = {} ) VULKAN_HPP_NOEXCEPT
      : pipelineBindPoint( pipelineBindPoint_ )
      , pipeline( pipeline_ )
      , indirectCommandsLayout( indirectCommandsLayout_ )
      , streamCount( streamCount_ )
      , pStreams( pStreams_ )
      , sequencesCount( sequencesCount_ )
      , preprocessBuffer( preprocessBuffer_ )
      , preprocessOffset( preprocessOffset_ )
      , preprocessSize( preprocessSize_ )
      , sequencesCountBuffer( sequencesCountBuffer_ )
      , sequencesCountOffset( sequencesCountOffset_ )
      , sequencesIndexBuffer( sequencesIndexBuffer_ )
      , sequencesIndexOffset( sequencesIndexOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GeneratedCommandsInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineBindPoint        pipelineBindPoint_,
      VULKAN_HPP_NAMESPACE::Pipeline                 pipeline_,
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const &
                                       streams_,
      uint32_t                         sequencesCount_       = {},
      VULKAN_HPP_NAMESPACE::Buffer     preprocessBuffer_     = {},
      VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_     = {},
      VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_       = {},
      VULKAN_HPP_NAMESPACE::Buffer     sequencesCountBuffer_ = {},
      VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {},
      VULKAN_HPP_NAMESPACE::Buffer     sequencesIndexBuffer_ = {},
      VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
      : pipelineBindPoint( pipelineBindPoint_ )
      , pipeline( pipeline_ )
      , indirectCommandsLayout( indirectCommandsLayout_ )
      , streamCount( static_cast<uint32_t>( streams_.size() ) )
      , pStreams( streams_.data() )
      , sequencesCount( sequencesCount_ )
      , preprocessBuffer( preprocessBuffer_ )
      , preprocessOffset( preprocessOffset_ )
      , preprocessSize( preprocessSize_ )
      , sequencesCountBuffer( sequencesCountBuffer_ )
      , sequencesCountOffset( sequencesCountOffset_ )
      , sequencesIndexBuffer( sequencesIndexBuffer_ )
      , sequencesIndexOffset( sequencesIndexOffset_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV &
                            operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
      return *this;
    }

    GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    GeneratedCommandsInfoNV &
      setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }

    GeneratedCommandsInfoNV & setIndirectCommandsLayout(
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectCommandsLayout = indirectCommandsLayout_;
      return *this;
    }

    GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount = streamCount_;
      return *this;
    }

    GeneratedCommandsInfoNV &
      setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT
    {
      pStreams = pStreams_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GeneratedCommandsInfoNV & setStreams(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const &
        streams_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount = static_cast<uint32_t>( streams_.size() );
      pStreams    = streams_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesCount = sequencesCount_;
      return *this;
    }

    GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      preprocessBuffer = preprocessBuffer_;
      return *this;
    }

    GeneratedCommandsInfoNV &
      setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      preprocessOffset = preprocessOffset_;
      return *this;
    }

    GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
    {
      preprocessSize = preprocessSize_;
      return *this;
    }

    GeneratedCommandsInfoNV &
      setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesCountBuffer = sequencesCountBuffer_;
      return *this;
    }

    GeneratedCommandsInfoNV &
      setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesCountOffset = sequencesCountOffset_;
      return *this;
    }

    GeneratedCommandsInfoNV &
      setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesIndexBuffer = sequencesIndexBuffer_;
      return *this;
    }

    GeneratedCommandsInfoNV &
      setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      sequencesIndexOffset = sequencesIndexOffset_;
      return *this;
    }

    operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeneratedCommandsInfoNV *>( this );
    }

    operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeneratedCommandsInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeneratedCommandsInfoNV const & ) const = default;
#else
    bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
             ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) &&
             ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) &&
             ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) &&
             ( preprocessOffset == rhs.preprocessOffset ) && ( preprocessSize == rhs.preprocessSize ) &&
             ( sequencesCountBuffer == rhs.sequencesCountBuffer ) &&
             ( sequencesCountOffset == rhs.sequencesCountOffset ) &&
             ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) &&
             ( sequencesIndexOffset == rhs.sequencesIndexOffset );
    }

    bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType     sType             = StructureType::eGeneratedCommandsInfoNV;
    const void *                            pNext             = {};
    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
    VULKAN_HPP_NAMESPACE::Pipeline          pipeline          = {};
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV         indirectCommandsLayout = {};
    uint32_t                                               streamCount            = {};
    const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams               = {};
    uint32_t                                               sequencesCount         = {};
    VULKAN_HPP_NAMESPACE::Buffer                           preprocessBuffer       = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                       preprocessOffset       = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                       preprocessSize         = {};
    VULKAN_HPP_NAMESPACE::Buffer                           sequencesCountBuffer   = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                       sequencesCountOffset   = {};
    VULKAN_HPP_NAMESPACE::Buffer                           sequencesIndexBuffer   = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                       sequencesIndexOffset   = {};
  };
  static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
  {
    using Type = GeneratedCommandsInfoNV;
  };
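
  // Illustrative sketch only: in enhanced mode the ArrayProxy constructor above derives
  // streamCount / pStreams from a contiguous range. All names here (`pipeline`, `layout`,
  // `streamBuffer`, `sequenceCount`, `preprocessBuffer`, `preprocessSize`, `cmd`) are
  // hypothetical, assumed-valid objects.
  //
  //   std::array<vk::IndirectCommandsStreamNV, 1> streams = { vk::IndirectCommandsStreamNV( streamBuffer, 0 ) };
  //   vk::GeneratedCommandsInfoNV info( vk::PipelineBindPoint::eGraphics, pipeline, layout, streams,
  //                                     sequenceCount, preprocessBuffer, 0, preprocessSize );
  //   cmd.executeGeneratedCommandsNV( VK_FALSE, info );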

  struct MemoryBarrier
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
                                        VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcAccessMask( srcAccessMask_ )
      , dstAccessMask( dstAccessMask_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
      return *this;
    }

    MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryBarrier *>( this );
    }

    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryBarrier *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryBarrier const & ) const = default;
#else
    bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) &&
             ( dstAccessMask == rhs.dstAccessMask );
    }

    bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eMemoryBarrier;
    const void *                        pNext         = {};
    VULKAN_HPP_NAMESPACE::AccessFlags   srcAccessMask = {};
    VULKAN_HPP_NAMESPACE::AccessFlags   dstAccessMask = {};
  };
  static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryBarrier>
  {
    using Type = MemoryBarrier;
  };
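
  // Illustrative sketch only (hypothetical command buffer `cmd`): a global memory barrier that
  // makes transfer writes visible to subsequent fragment-shader reads via the classic
  // CommandBuffer::pipelineBarrier path.
  //
  //   vk::MemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead );
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader,
  //                        {}, barrier, {}, {} );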

  struct ImageMemoryBarrier
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
                          VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
                          VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                          VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                          uint32_t                          srcQueueFamilyIndex_        = {},
                          uint32_t                          dstQueueFamilyIndex_        = {},
                          VULKAN_HPP_NAMESPACE::Image       image_                      = {},
                          VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcAccessMask( srcAccessMask_ )
      , dstAccessMask( dstAccessMask_ )
      , oldLayout( oldLayout_ )
      , newLayout( newLayout_ )
      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
      , image( image_ )
      , subresourceRange( subresourceRange_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier &
                            operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
      return *this;
    }

    ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      oldLayout = oldLayout_;
      return *this;
    }

    ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      newLayout = newLayout_;
      return *this;
    }

    ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    ImageMemoryBarrier &
      setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
    {
      subresourceRange = subresourceRange_;
      return *this;
    }

    operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageMemoryBarrier *>( this );
    }

    operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageMemoryBarrier *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageMemoryBarrier const & ) const = default;
#else
    bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) &&
             ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) &&
             ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
             ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) &&
             ( subresourceRange == rhs.subresourceRange );
    }

    bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType               = StructureType::eImageMemoryBarrier;
    const void *                                pNext               = {};
    VULKAN_HPP_NAMESPACE::AccessFlags           srcAccessMask       = {};
    VULKAN_HPP_NAMESPACE::AccessFlags           dstAccessMask       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout           oldLayout           = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageLayout           newLayout           = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    uint32_t                                    srcQueueFamilyIndex = {};
    uint32_t                                    dstQueueFamilyIndex = {};
    VULKAN_HPP_NAMESPACE::Image                 image               = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange    = {};
  };
  static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageMemoryBarrier>
  {
    using Type = ImageMemoryBarrier;
  };
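
  // Illustrative sketch only (hypothetical `image` and `cmd` handles): transitioning a color
  // image from eUndefined to eTransferDstOptimal using the chaining setters defined above.
  //
  //   vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier()
  //                                      .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
  //                                      .setOldLayout( vk::ImageLayout::eUndefined )
  //                                      .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
  //                                      .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //                                      .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //                                      .setImage( image )
  //                                      .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
  //                        {}, {}, {}, barrier );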

  struct MemoryBarrier2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MemoryBarrier2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_  = {},
                         VULKAN_HPP_NAMESPACE::AccessFlags2KHR        srcAccessMask_ = {},
                         VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_  = {},
                         VULKAN_HPP_NAMESPACE::AccessFlags2KHR        dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcStageMask( srcStageMask_ )
      , srcAccessMask( srcAccessMask_ )
      , dstStageMask( dstStageMask_ )
      , dstAccessMask( dstAccessMask_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryBarrier2KHR( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryBarrier2KHR( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryBarrier2KHR( *reinterpret_cast<MemoryBarrier2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2KHR &
                            operator=( MemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryBarrier2KHR & operator=( VkMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR const *>( &rhs );
      return *this;
    }

    MemoryBarrier2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryBarrier2KHR &
      setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    MemoryBarrier2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    MemoryBarrier2KHR &
      setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    MemoryBarrier2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    operator VkMemoryBarrier2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryBarrier2KHR *>( this );
    }

    operator VkMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryBarrier2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryBarrier2KHR const & ) const = default;
#else
    bool operator==( MemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) &&
             ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) &&
             ( dstAccessMask == rhs.dstAccessMask );
    }

    bool operator!=( MemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType         = StructureType::eMemoryBarrier2KHR;
    const void *                                 pNext         = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask  = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2KHR        srcAccessMask = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask  = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2KHR        dstAccessMask = {};
  };
  static_assert( sizeof( MemoryBarrier2KHR ) == sizeof( VkMemoryBarrier2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryBarrier2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryBarrier2KHR>
  {
    using Type = MemoryBarrier2KHR;
  };
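
  // Note: MemoryBarrier2KHR (VK_KHR_synchronization2) carries its own stage masks instead of
  // taking them from the barrier command. Illustrative sketch with conservative, hypothetical
  // values:
  //
  //   vk::MemoryBarrier2KHR barrier2( vk::PipelineStageFlagBits2KHR::eAllCommands,
  //                                   vk::AccessFlagBits2KHR::eMemoryWrite,
  //                                   vk::PipelineStageFlagBits2KHR::eAllCommands,
  //                                   vk::AccessFlagBits2KHR::eMemoryRead );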

  struct ImageMemoryBarrier2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR(
      VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_        = {},
      VULKAN_HPP_NAMESPACE::AccessFlags2KHR        srcAccessMask_       = {},
      VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_        = {},
      VULKAN_HPP_NAMESPACE::AccessFlags2KHR        dstAccessMask_       = {},
      VULKAN_HPP_NAMESPACE::ImageLayout            oldLayout_           = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      VULKAN_HPP_NAMESPACE::ImageLayout            newLayout_           = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      uint32_t                                     srcQueueFamilyIndex_ = {},
      uint32_t                                     dstQueueFamilyIndex_ = {},
      VULKAN_HPP_NAMESPACE::Image                  image_               = {},
      VULKAN_HPP_NAMESPACE::ImageSubresourceRange  subresourceRange_    = {} ) VULKAN_HPP_NOEXCEPT
      : srcStageMask( srcStageMask_ )
      , srcAccessMask( srcAccessMask_ )
      , dstStageMask( dstStageMask_ )
      , dstAccessMask( dstAccessMask_ )
      , oldLayout( oldLayout_ )
      , newLayout( newLayout_ )
      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
      , image( image_ )
      , subresourceRange( subresourceRange_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2KHR( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageMemoryBarrier2KHR( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageMemoryBarrier2KHR( *reinterpret_cast<ImageMemoryBarrier2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2KHR &
                            operator=( ImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageMemoryBarrier2KHR & operator=( VkImageMemoryBarrier2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR const *>( &rhs );
      return *this;
    }

    ImageMemoryBarrier2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageMemoryBarrier2KHR &
      setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    ImageMemoryBarrier2KHR &
      setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    ImageMemoryBarrier2KHR &
      setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    ImageMemoryBarrier2KHR &
      setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2KHR dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    ImageMemoryBarrier2KHR & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      oldLayout = oldLayout_;
      return *this;
    }

    ImageMemoryBarrier2KHR & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      newLayout = newLayout_;
      return *this;
    }

    ImageMemoryBarrier2KHR & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      srcQueueFamilyIndex = srcQueueFamilyIndex_;
      return *this;
    }

    ImageMemoryBarrier2KHR & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      dstQueueFamilyIndex = dstQueueFamilyIndex_;
      return *this;
    }

    ImageMemoryBarrier2KHR & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    ImageMemoryBarrier2KHR &
      setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
    {
      subresourceRange = subresourceRange_;
      return *this;
    }

    operator VkImageMemoryBarrier2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageMemoryBarrier2KHR *>( this );
    }

    operator VkImageMemoryBarrier2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageMemoryBarrier2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageMemoryBarrier2KHR const & ) const = default;
#else
    bool operator==( ImageMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) &&
             ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) &&
             ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) &&
             ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) &&
             ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) &&
             ( subresourceRange == rhs.subresourceRange );
    }

    bool operator!=( ImageMemoryBarrier2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType               = StructureType::eImageMemoryBarrier2KHR;
    const void *                                 pNext               = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR srcStageMask        = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2KHR        srcAccessMask       = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR dstStageMask        = {};
    VULKAN_HPP_NAMESPACE::AccessFlags2KHR        dstAccessMask       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout            oldLayout           = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageLayout            newLayout           = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    uint32_t                                     srcQueueFamilyIndex = {};
    uint32_t                                     dstQueueFamilyIndex = {};
    VULKAN_HPP_NAMESPACE::Image                  image               = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceRange  subresourceRange    = {};
  };
  static_assert( sizeof( ImageMemoryBarrier2KHR ) == sizeof( VkImageMemoryBarrier2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageMemoryBarrier2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageMemoryBarrier2KHR>
  {
    using Type = ImageMemoryBarrier2KHR;
  };
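
  // Illustrative sketch only (hypothetical `image` handle): an ImageMemoryBarrier2KHR is
  // consumed through DependencyInfoKHR (defined below) and CommandBuffer::pipelineBarrier2KHR
  // rather than being passed to the barrier command directly.
  //
  //   vk::ImageMemoryBarrier2KHR barrier2 = vk::ImageMemoryBarrier2KHR()
  //                                           .setSrcStageMask( vk::PipelineStageFlagBits2KHR::eAllCommands )
  //                                           .setSrcAccessMask( vk::AccessFlagBits2KHR::eMemoryWrite )
  //                                           .setDstStageMask( vk::PipelineStageFlagBits2KHR::eAllCommands )
  //                                           .setDstAccessMask( vk::AccessFlagBits2KHR::eMemoryRead )
  //                                           .setOldLayout( vk::ImageLayout::eUndefined )
  //                                           .setNewLayout( vk::ImageLayout::eGeneral )
  //                                           .setImage( image )
  //                                           .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );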

  struct DependencyInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DependencyInfoKHR(
      VULKAN_HPP_NAMESPACE::DependencyFlags                 dependencyFlags_          = {},
      uint32_t                                              memoryBarrierCount_       = {},
      const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR *       pMemoryBarriers_          = {},
      uint32_t                                              bufferMemoryBarrierCount_ = {},
      const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR * pBufferMemoryBarriers_    = {},
      uint32_t                                              imageMemoryBarrierCount_  = {},
      const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR *  pImageMemoryBarriers_     = {} ) VULKAN_HPP_NOEXCEPT
      : dependencyFlags( dependencyFlags_ )
      , memoryBarrierCount( memoryBarrierCount_ )
      , pMemoryBarriers( pMemoryBarriers_ )
      , bufferMemoryBarrierCount( bufferMemoryBarrierCount_ )
      , pBufferMemoryBarriers( pBufferMemoryBarriers_ )
      , imageMemoryBarrierCount( imageMemoryBarrierCount_ )
      , pImageMemoryBarriers( pImageMemoryBarriers_ )
    {}

    VULKAN_HPP_CONSTEXPR DependencyInfoKHR( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DependencyInfoKHR( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DependencyInfoKHR( *reinterpret_cast<DependencyInfoKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DependencyInfoKHR(
      VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR> const &
        memoryBarriers_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR> const &
        bufferMemoryBarriers_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR> const &
        imageMemoryBarriers_ = {} )
      : dependencyFlags( dependencyFlags_ )
      , memoryBarrierCount( static_cast<uint32_t>( memoryBarriers_.size() ) )
      , pMemoryBarriers( memoryBarriers_.data() )
      , bufferMemoryBarrierCount( static_cast<uint32_t>( bufferMemoryBarriers_.size() ) )
      , pBufferMemoryBarriers( bufferMemoryBarriers_.data() )
      , imageMemoryBarrierCount( static_cast<uint32_t>( imageMemoryBarriers_.size() ) )
      , pImageMemoryBarriers( imageMemoryBarriers_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DependencyInfoKHR &
                            operator=( DependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DependencyInfoKHR & operator=( VkDependencyInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DependencyInfoKHR const *>( &rhs );
      return *this;
    }

    DependencyInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DependencyInfoKHR & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyFlags = dependencyFlags_;
      return *this;
    }

    DependencyInfoKHR & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryBarrierCount = memoryBarrierCount_;
      return *this;
    }

    DependencyInfoKHR &
      setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pMemoryBarriers = pMemoryBarriers_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DependencyInfoKHR & setMemoryBarriers(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR> const &
        memoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryBarrierCount = static_cast<uint32_t>( memoryBarriers_.size() );
      pMemoryBarriers    = memoryBarriers_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DependencyInfoKHR & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferMemoryBarrierCount = bufferMemoryBarrierCount_;
      return *this;
    }

    DependencyInfoKHR & setPBufferMemoryBarriers(
      const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pBufferMemoryBarriers = pBufferMemoryBarriers_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DependencyInfoKHR & setBufferMemoryBarriers(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR> const &
        bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferMemoryBarrierCount = static_cast<uint32_t>( bufferMemoryBarriers_.size() );
      pBufferMemoryBarriers    = bufferMemoryBarriers_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DependencyInfoKHR & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      imageMemoryBarrierCount = imageMemoryBarrierCount_;
      return *this;
    }

    DependencyInfoKHR & setPImageMemoryBarriers(
      const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageMemoryBarriers = pImageMemoryBarriers_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DependencyInfoKHR & setImageMemoryBarriers(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR> const &
        imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT
    {
      imageMemoryBarrierCount = static_cast<uint32_t>( imageMemoryBarriers_.size() );
      pImageMemoryBarriers    = imageMemoryBarriers_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDependencyInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDependencyInfoKHR *>( this );
    }

    operator VkDependencyInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDependencyInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DependencyInfoKHR const & ) const = default;
#else
    bool operator==( DependencyInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) &&
             ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) &&
             ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) &&
             ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) &&
             ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) &&
             ( pImageMemoryBarriers == rhs.pImageMemoryBarriers );
    }

    bool operator!=( DependencyInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType                    = StructureType::eDependencyInfoKHR;
    const void *                                          pNext                    = {};
    VULKAN_HPP_NAMESPACE::DependencyFlags                 dependencyFlags          = {};
    uint32_t                                              memoryBarrierCount       = {};
    const VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR *       pMemoryBarriers          = {};
    uint32_t                                              bufferMemoryBarrierCount = {};
    const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR * pBufferMemoryBarriers    = {};
    uint32_t                                              imageMemoryBarrierCount  = {};
    const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR *  pImageMemoryBarriers     = {};
  };
  static_assert( sizeof( DependencyInfoKHR ) == sizeof( VkDependencyInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DependencyInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDependencyInfoKHR>
  {
    using Type = DependencyInfoKHR;
  };

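  // Sampler is a thin handle wrapper around VkSampler: same size as the native handle, comparable,
  // convertible back to VkSampler, and testable against VK_NULL_HANDLE via explicit operator bool.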
  class Sampler
  {
  public:
    using CType = VkSampler;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;

  public:
    VULKAN_HPP_CONSTEXPR         Sampler() = default;
    VULKAN_HPP_CONSTEXPR         Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT : m_sampler( sampler ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Sampler & operator=( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
    {
      m_sampler = sampler;
      return *this;
    }
#endif

    Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_sampler = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Sampler const & ) const = default;
#else
    bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_sampler == rhs.m_sampler;
    }

    bool operator!=( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_sampler != rhs.m_sampler;
    }

    bool operator<( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_sampler < rhs.m_sampler;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
    {
      return m_sampler;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_sampler != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_sampler == VK_NULL_HANDLE;
    }

  private:
    VkSampler m_sampler = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSampler>
  {
    using type = VULKAN_HPP_NAMESPACE::Sampler;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
  {
    using Type = VULKAN_HPP_NAMESPACE::Sampler;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
  {
    using Type = VULKAN_HPP_NAMESPACE::Sampler;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

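  // DescriptorImageInfo mirrors VkDescriptorImageInfo: the sampler / image view / image layout triple
  // referenced by image-type descriptors in a WriteDescriptorSet.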
  struct DescriptorImageInfo
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler     sampler_   = {},
                                              VULKAN_HPP_NAMESPACE::ImageView   imageView_ = {},
                                              VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ =
                                                VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
      : sampler( sampler_ )
      , imageView( imageView_ )
      , imageLayout( imageLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo &
                            operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
      return *this;
    }

    DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
    {
      sampler = sampler_;
      return *this;
    }

    DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }

    DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      imageLayout = imageLayout_;
      return *this;
    }

    operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorImageInfo *>( this );
    }

    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorImageInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorImageInfo const & ) const = default;
#else
    bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
    }

    bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Sampler     sampler     = {};
    VULKAN_HPP_NAMESPACE::ImageView   imageView   = {};
    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  };
  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );

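  // DescriptorBufferInfo mirrors VkDescriptorBufferInfo: a buffer handle plus the offset and range
  // (which may be VK_WHOLE_SIZE) referenced by buffer-type descriptors.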
  struct DescriptorBufferInfo
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer     buffer_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize range_  = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
      , offset( offset_ )
      , range( range_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo &
                            operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
      return *this;
    }

    DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
    {
      range = range_;
      return *this;
    }

    operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorBufferInfo *>( this );
    }

    operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorBufferInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorBufferInfo const & ) const = default;
#else
    bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range );
    }

    bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Buffer     buffer = {};
    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize range  = {};
  };
  static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );

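  // BufferView is a thin handle wrapper around VkBufferView, following the same pattern as Sampler above.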
  class BufferView
  {
  public:
    using CType = VkBufferView;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;

  public:
    VULKAN_HPP_CONSTEXPR         BufferView() = default;
    VULKAN_HPP_CONSTEXPR         BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
    {
      m_bufferView = bufferView;
      return *this;
    }
#endif

    BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_bufferView = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( BufferView const & ) const = default;
#else
    bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_bufferView == rhs.m_bufferView;
    }

    bool operator!=( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_bufferView != rhs.m_bufferView;
    }

    bool operator<( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_bufferView < rhs.m_bufferView;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
    {
      return m_bufferView;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_bufferView != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_bufferView == VK_NULL_HANDLE;
    }

  private:
    VkBufferView m_bufferView = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eBufferView>
  {
    using type = VULKAN_HPP_NAMESPACE::BufferView;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
  {
    using Type = VULKAN_HPP_NAMESPACE::BufferView;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
  {
    using Type = VULKAN_HPP_NAMESPACE::BufferView;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

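  // WriteDescriptorSet mirrors VkWriteDescriptorSet. Exactly one of pImageInfo, pBufferInfo or
  // pTexelBufferView is read, depending on descriptorType; the enhanced-mode constructor deduces
  // descriptorCount from whichever ArrayProxy is non-empty and asserts/throws if more than one is set.
  // Illustrative sketch only (assumes valid vk::Device device, vk::DescriptorSet set and an lvalue
  // bufferInfo of type vk::DescriptorBufferInfo; the names are placeholders):
  //   vk::WriteDescriptorSet write( set, 0 /*binding*/, 0 /*arrayElement*/,
  //                                 vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
  //   device.updateDescriptorSets( write, {} );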
  struct WriteDescriptorSet
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR WriteDescriptorSet(
      VULKAN_HPP_NAMESPACE::DescriptorSet  dstSet_                    = {},
      uint32_t                             dstBinding_                = {},
      uint32_t                             dstArrayElement_           = {},
      uint32_t                             descriptorCount_           = {},
      VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_            = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
      const VULKAN_HPP_NAMESPACE::DescriptorImageInfo *  pImageInfo_  = {},
      const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {},
      const VULKAN_HPP_NAMESPACE::BufferView *           pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT
      : dstSet( dstSet_ )
      , dstBinding( dstBinding_ )
      , dstArrayElement( dstArrayElement_ )
      , descriptorCount( descriptorCount_ )
      , descriptorType( descriptorType_ )
      , pImageInfo( pImageInfo_ )
      , pBufferInfo( pBufferInfo_ )
      , pTexelBufferView( pTexelBufferView_ )
    {}

    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSet(
      VULKAN_HPP_NAMESPACE::DescriptorSet  dstSet_,
      uint32_t                             dstBinding_,
      uint32_t                             dstArrayElement_,
      VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const &
        bufferInfo_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const &
        texelBufferView_ = {} )
      : dstSet( dstSet_ )
      , dstBinding( dstBinding_ )
      , dstArrayElement( dstArrayElement_ )
      , descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size()
                                                                    : !bufferInfo_.empty() ? bufferInfo_.size()
                                                                                           : texelBufferView_.size() ) )
      , descriptorType( descriptorType_ )
      , pImageInfo( imageInfo_.data() )
      , pBufferInfo( bufferInfo_.data() )
      , pTexelBufferView( texelBufferView_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 );
#    else
      if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet &
                            operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
      return *this;
    }

    WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSet = dstSet_;
      return *this;
    }

    WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBinding = dstBinding_;
      return *this;
    }

    WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
    {
      dstArrayElement = dstArrayElement_;
      return *this;
    }

    WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }

    WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorType = descriptorType_;
      return *this;
    }

    WriteDescriptorSet &
      setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageInfo = pImageInfo_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSet & setImageInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const &
        imageInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
      pImageInfo      = imageInfo_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    WriteDescriptorSet &
      setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pBufferInfo = pBufferInfo_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSet & setBufferInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const &
        bufferInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
      pBufferInfo     = bufferInfo_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    WriteDescriptorSet &
      setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
    {
      pTexelBufferView = pTexelBufferView_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSet & setTexelBufferView(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ )
      VULKAN_HPP_NOEXCEPT
    {
      descriptorCount  = static_cast<uint32_t>( texelBufferView_.size() );
      pTexelBufferView = texelBufferView_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSet *>( this );
    }

    operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSet *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSet const & ) const = default;
#else
    bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) &&
             ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) &&
             ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) &&
             ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) &&
             ( pTexelBufferView == rhs.pTexelBufferView );
    }

    bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType           = StructureType::eWriteDescriptorSet;
    const void *                                       pNext           = {};
    VULKAN_HPP_NAMESPACE::DescriptorSet                dstSet          = {};
    uint32_t                                           dstBinding      = {};
    uint32_t                                           dstArrayElement = {};
    uint32_t                                           descriptorCount = {};
    VULKAN_HPP_NAMESPACE::DescriptorType               descriptorType  = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo *  pImageInfo      = {};
    const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo     = {};
    const VULKAN_HPP_NAMESPACE::BufferView *           pTexelBufferView = {};
  };
  static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSet>
  {
    using Type = WriteDescriptorSet;
  };

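  // DescriptorUpdateTemplate is a thin handle wrapper around VkDescriptorUpdateTemplate;
  // DescriptorUpdateTemplateKHR below is an alias for the same type.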
  class DescriptorUpdateTemplate
  {
  public:
    using CType = VkDescriptorUpdateTemplate;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;

  public:
    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default;
    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT
      DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
      : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorUpdateTemplate = descriptorUpdateTemplate;
      return *this;
    }
#endif

    DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorUpdateTemplate = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorUpdateTemplate const & ) const = default;
#else
    bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
    }

    bool operator!=( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
    }

    bool operator<( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorUpdateTemplate;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
    }

  private:
    VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDescriptorUpdateTemplate>
  {
    using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
  {
    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
  {
    using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
  using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;

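  // Event is a thin handle wrapper around VkEvent, following the same pattern as the other handle classes.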
  class Event
  {
  public:
    using CType = VkEvent;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;

  public:
    VULKAN_HPP_CONSTEXPR         Event() = default;
    VULKAN_HPP_CONSTEXPR         Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT : m_event( event ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Event & operator=( VkEvent event ) VULKAN_HPP_NOEXCEPT
    {
      m_event = event;
      return *this;
    }
#endif

    Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_event = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Event const & ) const = default;
#else
    bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_event == rhs.m_event;
    }

    bool operator!=( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_event != rhs.m_event;
    }

    bool operator<( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_event < rhs.m_event;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
    {
      return m_event;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_event != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_event == VK_NULL_HANDLE;
    }

  private:
    VkEvent m_event = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eEvent>
  {
    using type = VULKAN_HPP_NAMESPACE::Event;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
  {
    using Type = VULKAN_HPP_NAMESPACE::Event;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
  {
    using Type = VULKAN_HPP_NAMESPACE::Event;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

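  // ImageResolve mirrors VkImageResolve: one region of a multisample resolve (source/destination
  // subresources, offsets and extent).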
  struct ImageResolve
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
                                       VULKAN_HPP_NAMESPACE::Offset3D               srcOffset_      = {},
                                       VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
                                       VULKAN_HPP_NAMESPACE::Offset3D               dstOffset_      = {},
                                       VULKAN_HPP_NAMESPACE::Extent3D               extent_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource( srcSubresource_ )
      , srcOffset( srcOffset_ )
      , dstSubresource( dstSubresource_ )
      , dstOffset( dstOffset_ )
      , extent( extent_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
      return *this;
    }

    ImageResolve &
      setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    ImageResolve &
      setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageResolve *>( this );
    }

    operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageResolve *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageResolve const & ) const = default;
#else
    bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) &&
             ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
    }

    bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D               srcOffset      = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D               dstOffset      = {};
    VULKAN_HPP_NAMESPACE::Extent3D               extent         = {};
  };
  static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );

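  // ImageResolve2KHR is the extensible (sType/pNext) variant of ImageResolve used by VK_KHR_copy_commands2.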
  struct ImageResolve2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageResolve2KHR( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
                                           VULKAN_HPP_NAMESPACE::Offset3D               srcOffset_      = {},
                                           VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
                                           VULKAN_HPP_NAMESPACE::Offset3D               dstOffset_      = {},
                                           VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubresource( srcSubresource_ )
      , srcOffset( srcOffset_ )
      , dstSubresource( dstSubresource_ )
      , dstOffset( dstOffset_ )
      , extent( extent_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageResolve2KHR( *reinterpret_cast<ImageResolve2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2KHR const *>( &rhs );
      return *this;
    }

    ImageResolve2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageResolve2KHR &
      setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubresource = srcSubresource_;
      return *this;
    }

    ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      srcOffset = srcOffset_;
      return *this;
    }

    ImageResolve2KHR &
      setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubresource = dstSubresource_;
      return *this;
    }

    ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      dstOffset = dstOffset_;
      return *this;
    }

    ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    operator VkImageResolve2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageResolve2KHR *>( this );
    }

    operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageResolve2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageResolve2KHR const & ) const = default;
#else
    bool operator==( ImageResolve2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) &&
             ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) &&
             ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent );
    }

    bool operator!=( ImageResolve2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType          = StructureType::eImageResolve2KHR;
    const void *                                 pNext          = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D               srcOffset      = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
    VULKAN_HPP_NAMESPACE::Offset3D               dstOffset      = {};
    VULKAN_HPP_NAMESPACE::Extent3D               extent         = {};
  };
  static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageResolve2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageResolve2KHR>
  {
    using Type = ImageResolve2KHR;
  };

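  // ResolveImageInfo2KHR mirrors VkResolveImageInfo2KHR: the complete parameter block for
  // CommandBuffer::resolveImage2KHR. Illustrative sketch only (assumes valid lvalues cmd, srcImage,
  // dstImage and a vk::ImageResolve2KHR region; the names are placeholders):
  //   vk::ResolveImageInfo2KHR info( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                                  dstImage, vk::ImageLayout::eTransferDstOptimal, region );
  //   cmd.resolveImage2KHR( info );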
  struct ResolveImageInfo2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR(
      VULKAN_HPP_NAMESPACE::Image                    srcImage_       = {},
      VULKAN_HPP_NAMESPACE::ImageLayout              srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      VULKAN_HPP_NAMESPACE::Image                    dstImage_       = {},
      VULKAN_HPP_NAMESPACE::ImageLayout              dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
      uint32_t                                       regionCount_    = {},
      const VULKAN_HPP_NAMESPACE::ImageResolve2KHR * pRegions_       = {} ) VULKAN_HPP_NOEXCEPT
      : srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( regionCount_ )
      , pRegions( pRegions_ )
    {}

    VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ResolveImageInfo2KHR( *reinterpret_cast<ResolveImageInfo2KHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ResolveImageInfo2KHR(
      VULKAN_HPP_NAMESPACE::Image       srcImage_,
      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_,
      VULKAN_HPP_NAMESPACE::Image       dstImage_,
      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ )
      : srcImage( srcImage_ )
      , srcImageLayout( srcImageLayout_ )
      , dstImage( dstImage_ )
      , dstImageLayout( dstImageLayout_ )
      , regionCount( static_cast<uint32_t>( regions_.size() ) )
      , pRegions( regions_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2KHR &
                            operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR const *>( &rhs );
      return *this;
    }

    ResolveImageInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImage = srcImage_;
      return *this;
    }

    ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      srcImageLayout = srcImageLayout_;
      return *this;
    }

    ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImage = dstImage_;
      return *this;
    }

    ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      dstImageLayout = dstImageLayout_;
      return *this;
    }

    ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      regionCount = regionCount_;
      return *this;
    }

    ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ResolveImageInfo2KHR & setRegions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ )
      VULKAN_HPP_NOEXCEPT
    {
      regionCount = static_cast<uint32_t>( regions_.size() );
      pRegions    = regions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkResolveImageInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkResolveImageInfo2KHR *>( this );
    }

    operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkResolveImageInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ResolveImageInfo2KHR const & ) const = default;
#else
    bool operator==( ResolveImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) &&
             ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) &&
             ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) &&
             ( pRegions == rhs.pRegions );
    }

    bool operator!=( ResolveImageInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType          = StructureType::eResolveImageInfo2KHR;
    const void *                                   pNext          = {};
    VULKAN_HPP_NAMESPACE::Image                    srcImage       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout              srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    VULKAN_HPP_NAMESPACE::Image                    dstImage       = {};
    VULKAN_HPP_NAMESPACE::ImageLayout              dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
    uint32_t                                       regionCount    = {};
    const VULKAN_HPP_NAMESPACE::ImageResolve2KHR * pRegions       = {};
  };
  static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ResolveImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eResolveImageInfo2KHR>
  {
    using Type = ResolveImageInfo2KHR;
  };

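  // PerformanceMarkerInfoINTEL mirrors VkPerformanceMarkerInfoINTEL (VK_INTEL_performance_query):
  // a 64-bit marker value recorded into a command buffer.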
  struct PerformanceMarkerInfoINTEL
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT : marker( marker_ ) {}

    VULKAN_HPP_CONSTEXPR
      PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL &
                            operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
      return *this;
    }

    PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
    {
      marker = marker_;
      return *this;
    }

    operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( this );
    }

    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceMarkerInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default;
#else
    bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker );
    }

    bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::ePerformanceMarkerInfoINTEL;
    const void *                        pNext  = {};
    uint64_t                            marker = {};
  };
  static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
  {
    using Type = PerformanceMarkerInfoINTEL;
  };

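  // PerformanceOverrideInfoINTEL mirrors VkPerformanceOverrideInfoINTEL: selects an override type,
  // whether it is enabled, and an implementation-defined parameter value.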
  struct PerformanceOverrideInfoINTEL
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ =
                                      VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware,
                                    VULKAN_HPP_NAMESPACE::Bool32 enable_    = {},
                                    uint64_t                     parameter_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , enable( enable_ )
      , parameter( parameter_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL &
                            operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
      return *this;
    }

    PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PerformanceOverrideInfoINTEL &
      setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
    {
      enable = enable_;
      return *this;
    }

    PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
    {
      parameter = parameter_;
      return *this;
    }

    operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( this );
    }

    operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceOverrideInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default;
#else
    bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) &&
             ( parameter == rhs.parameter );
    }

    bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType = StructureType::ePerformanceOverrideInfoINTEL;
    const void *                                       pNext = {};
    VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type =
      VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
    VULKAN_HPP_NAMESPACE::Bool32 enable    = {};
    uint64_t                     parameter = {};
  };
  static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
  {
    using Type = PerformanceOverrideInfoINTEL;
  };

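  // PerformanceStreamMarkerInfoINTEL mirrors VkPerformanceStreamMarkerInfoINTEL: the 32-bit
  // stream-marker variant of PerformanceMarkerInfoINTEL.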
  struct PerformanceStreamMarkerInfoINTEL
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT
      : marker( marker_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL &
                            operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
      return *this;
    }

    PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
    {
      marker = marker_;
      return *this;
    }

    operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( this );
    }

    operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default;
#else
    bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker );
    }

    bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::ePerformanceStreamMarkerInfoINTEL;
    const void *                        pNext  = {};
    uint32_t                            marker = {};
  };
  static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
  {
    using Type = PerformanceStreamMarkerInfoINTEL;
  };

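  // VertexInputBindingDescription2EXT mirrors VkVertexInputBindingDescription2EXT
  // (VK_EXT_vertex_input_dynamic_state): a vertex binding description extended with sType/pNext
  // and an instancing divisor, for use with CommandBuffer::setVertexInputEXT.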
  struct VertexInputBindingDescription2EXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVertexInputBindingDescription2EXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT(
      uint32_t                              binding_   = {},
      uint32_t                              stride_    = {},
      VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex,
      uint32_t                              divisor_   = {} ) VULKAN_HPP_NOEXCEPT
      : binding( binding_ )
      , stride( stride_ )
      , inputRate( inputRate_ )
      , divisor( divisor_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputBindingDescription2EXT( *reinterpret_cast<VertexInputBindingDescription2EXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT &
                            operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const *>( &rhs );
      return *this;
    }

    VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    VertexInputBindingDescription2EXT &
      setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
    {
      inputRate = inputRate_;
      return *this;
    }

    VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
    {
      divisor = divisor_;
      return *this;
    }

    operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( this );
    }

    operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputBindingDescription2EXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default;
#else
    bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) &&
             ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) && ( divisor == rhs.divisor );
    }

    bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType     = StructureType::eVertexInputBindingDescription2EXT;
    void *                                pNext     = {};
    uint32_t                              binding   = {};
    uint32_t                              stride    = {};
    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
    uint32_t                              divisor   = {};
  };
  static_assert( sizeof( VertexInputBindingDescription2EXT ) == sizeof( VkVertexInputBindingDescription2EXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VertexInputBindingDescription2EXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVertexInputBindingDescription2EXT>
  {
    using Type = VertexInputBindingDescription2EXT;
  };
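
  // Usage sketch (illustrative, not generated code): a binding description filled through the
  // constructor above, assuming the default 'vk' namespace and a hypothetical application vertex
  // type 'MyVertex'.
  //
  //   vk::VertexInputBindingDescription2EXT binding( 0, sizeof( MyVertex ),
  //                                                  vk::VertexInputRate::eVertex, 1 );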

  struct VertexInputAttributeDescription2EXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVertexInputAttributeDescription2EXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT(
      uint32_t                     location_ = {},
      uint32_t                     binding_  = {},
      VULKAN_HPP_NAMESPACE::Format format_   = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      uint32_t                     offset_   = {} ) VULKAN_HPP_NOEXCEPT
      : location( location_ )
      , binding( binding_ )
      , format( format_ )
      , offset( offset_ )
    {}

    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputAttributeDescription2EXT( *reinterpret_cast<VertexInputAttributeDescription2EXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT &
                            operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputAttributeDescription2EXT &
      operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const *>( &rhs );
      return *this;
    }

    VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
    {
      location = location_;
      return *this;
    }

    VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( this );
    }

    operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputAttributeDescription2EXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default;
#else
    bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) &&
             ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset );
    }

    bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eVertexInputAttributeDescription2EXT;
    void *                              pNext    = {};
    uint32_t                            location = {};
    uint32_t                            binding  = {};
    VULKAN_HPP_NAMESPACE::Format        format   = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    uint32_t                            offset   = {};
  };
  static_assert( sizeof( VertexInputAttributeDescription2EXT ) == sizeof( VkVertexInputAttributeDescription2EXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VertexInputAttributeDescription2EXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVertexInputAttributeDescription2EXT>
  {
    using Type = VertexInputAttributeDescription2EXT;
  };
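
  // Usage sketch (illustrative, not generated code): an attribute description built with the
  // fluent setters; together with a VertexInputBindingDescription2EXT it describes one vertex
  // attribute for the dynamic vertex-input state of VK_EXT_vertex_input_dynamic_state.
  //
  //   vk::VertexInputAttributeDescription2EXT attribute =
  //     vk::VertexInputAttributeDescription2EXT{}
  //       .setLocation( 0 )
  //       .setBinding( 0 )
  //       .setFormat( vk::Format::eR32G32B32Sfloat )
  //       .setOffset( 0 );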

  struct ShadingRatePaletteNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(
      uint32_t                                                shadingRatePaletteEntryCount_ = {},
      const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_   = {} ) VULKAN_HPP_NOEXCEPT
      : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ )
      , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
    {}

    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ShadingRatePaletteNV(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const &
        shadingRatePaletteEntries_ )
      : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) )
      , pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV &
                            operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
      return *this;
    }

    ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
      return *this;
    }

    ShadingRatePaletteNV & setPShadingRatePaletteEntries(
      const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ShadingRatePaletteNV & setShadingRatePaletteEntries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const &
        shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
      pShadingRatePaletteEntries   = shadingRatePaletteEntries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShadingRatePaletteNV *>( this );
    }

    operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShadingRatePaletteNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ShadingRatePaletteNV const & ) const = default;
#else
    bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) &&
             ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
    }

    bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                                shadingRatePaletteEntryCount = {};
    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries   = {};
  };
  static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
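
  // Usage sketch (illustrative, not generated code): in enhanced mode the ArrayProxyNoTemporaries
  // constructor above derives shadingRatePaletteEntryCount and pShadingRatePaletteEntries from a
  // named container, assuming the default 'vk' namespace.
  //
  //   std::array<vk::ShadingRatePaletteEntryNV, 2> entries = {
  //     vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel,
  //     vk::ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels };
  //   vk::ShadingRatePaletteNV palette( entries );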

  struct ViewportWScalingNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
      : xcoeff( xcoeff_ )
      , ycoeff( ycoeff_ )
    {}

    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV &
                            operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
      return *this;
    }

    ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
    {
      xcoeff = xcoeff_;
      return *this;
    }

    ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
    {
      ycoeff = ycoeff_;
      return *this;
    }

    operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViewportWScalingNV *>( this );
    }

    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViewportWScalingNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ViewportWScalingNV const & ) const = default;
#else
    bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff );
    }

    bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float xcoeff = {};
    float ycoeff = {};
  };
  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
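
  // Usage sketch (illustrative, not generated code): the constexpr constructor allows compile-time
  // construction of the per-viewport W-scaling coefficients, assuming the default 'vk' namespace.
  //
  //   constexpr vk::ViewportWScalingNV wScaling( 1.0f, 1.0f );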

  struct StridedDeviceAddressRegionKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize    stride_        = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize    size_          = {} ) VULKAN_HPP_NOEXCEPT
      : deviceAddress( deviceAddress_ )
      , stride( stride_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR
      StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR &
                            operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
      return *this;
    }

    StridedDeviceAddressRegionKHR &
      setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceAddress = deviceAddress_;
      return *this;
    }

    StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( this );
    }

    operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStridedDeviceAddressRegionKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default;
#else
    bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size );
    }

    bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    stride        = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    size          = {};
  };
  static_assert( sizeof( StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<StridedDeviceAddressRegionKHR>::value,
                 "struct wrapper is not a standard layout!" );

  class CommandBuffer
  {
  public:
    using CType = VkCommandBuffer;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;

  public:
    VULKAN_HPP_CONSTEXPR         CommandBuffer() = default;
    VULKAN_HPP_CONSTEXPR         CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
      : m_commandBuffer( commandBuffer )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
    {
      m_commandBuffer = commandBuffer;
      return *this;
    }
#endif

    CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_commandBuffer = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBuffer const & ) const = default;
#else
    bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_commandBuffer == rhs.m_commandBuffer;
    }

    bool operator!=( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_commandBuffer != rhs.m_commandBuffer;
    }

    bool operator<( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_commandBuffer < rhs.m_commandBuffer;
    }
#endif

    //=== VK_VERSION_1_0 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      begin( const CommandBufferBeginInfo & beginInfo,
             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
         reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
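
    // Usage sketch (illustrative, not generated code): recording a command buffer in enhanced
    // mode with exceptions enabled, where begin() and end() return void and report errors by
    // throwing; 'vk' is the default VULKAN_HPP_NAMESPACE.
    //
    //   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
    //   commandBuffer.begin( beginInfo );
    //   // ... record commands ...
    //   commandBuffer.end();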

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                       VULKAN_HPP_NAMESPACE::Pipeline          pipeline,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setViewport( uint32_t                               firstViewport,
                      uint32_t                               viewportCount,
                      const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setViewport( uint32_t                                                 firstViewport,
                      ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setScissor( uint32_t                             firstScissor,
                     uint32_t                             scissorCount,
                     const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setScissor( uint32_t                                               firstScissor,
                     ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
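
    // Usage sketch (illustrative, not generated code): the enhanced overloads accept a single
    // element through ArrayProxy, so one viewport or scissor can be passed directly.
    //
    //   vk::Viewport viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f );
    //   commandBuffer.setViewport( 0, viewport );
    //   commandBuffer.setScissor( 0, vk::Rect2D( { 0, 0 }, { 1280, 720 } ) );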

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setLineWidth( float            lineWidth,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setDepthBias( float            depthBiasConstantFactor,
                       float            depthBiasClamp,
                       float            depthBiasSlopeFactor,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setBlendConstants( const float      blendConstants[4],
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setDepthBounds( float            minDepthBounds,
                         float            maxDepthBounds,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                uint32_t                               compareMask,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                              uint32_t                               writeMask,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                              uint32_t                               reference,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint     pipelineBindPoint,
                             VULKAN_HPP_NAMESPACE::PipelineLayout        layout,
                             uint32_t                                    firstSet,
                             uint32_t                                    descriptorSetCount,
                             const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                             uint32_t                                    dynamicOffsetCount,
                             const uint32_t *                            pDynamicOffsets,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint                       pipelineBindPoint,
                             VULKAN_HPP_NAMESPACE::PipelineLayout                          layout,
                             uint32_t                                                      firstSet,
                             ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
                             ArrayProxy<const uint32_t> const &                            dynamicOffsets,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
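
    // Usage sketch (illustrative, not generated code): binding a single descriptor set with no
    // dynamic offsets via the ArrayProxy overload; 'pipelineLayout' and 'descriptorSet' are
    // assumed application-created handles.
    //
    //   commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout,
    //                                     0 /*firstSet*/, descriptorSet, {} /*dynamicOffsets*/ );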

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                          VULKAN_HPP_NAMESPACE::DeviceSize offset,
                          VULKAN_HPP_NAMESPACE::IndexType  indexType,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindVertexBuffers( uint32_t                                 firstBinding,
                            uint32_t                                 bindingCount,
                            const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                            const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindVertexBuffers( uint32_t                                                   firstBinding,
                            ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
                            ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
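
    // Usage sketch (illustrative, not generated code): binding one assumed vertex buffer handle at
    // offset 0; the ArrayProxy overload expects the buffer and offset ranges to have equal size.
    //
    //   commandBuffer.bindVertexBuffers( 0 /*firstBinding*/, vertexBuffer, { 0 } );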

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void draw( uint32_t         vertexCount,
               uint32_t         instanceCount,
               uint32_t         firstVertex,
               uint32_t         firstInstance,
               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawIndexed( uint32_t         indexCount,
                      uint32_t         instanceCount,
                      uint32_t         firstIndex,
                      int32_t          vertexOffset,
                      uint32_t         firstInstance,
                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                       VULKAN_HPP_NAMESPACE::DeviceSize offset,
                       uint32_t                         drawCount,
                       uint32_t                         stride,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
                              uint32_t                         drawCount,
                              uint32_t                         stride,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void dispatch( uint32_t         groupCountX,
                   uint32_t         groupCountY,
                   uint32_t         groupCountZ,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                           VULKAN_HPP_NAMESPACE::DeviceSize offset,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer             srcBuffer,
                     VULKAN_HPP_NAMESPACE::Buffer             dstBuffer,
                     uint32_t                                 regionCount,
                     const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer                               srcBuffer,
                     VULKAN_HPP_NAMESPACE::Buffer                               dstBuffer,
                     ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
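
    // Usage sketch (illustrative, not generated code): copying 'dataSize' bytes between two
    // assumed buffer handles with a single region.
    //
    //   vk::BufferCopy region( 0 /*srcOffset*/, 0 /*dstOffset*/, dataSize );
    //   commandBuffer.copyBuffer( stagingBuffer, deviceBuffer, region );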

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyImage( VULKAN_HPP_NAMESPACE::Image             srcImage,
                    VULKAN_HPP_NAMESPACE::ImageLayout       srcImageLayout,
                    VULKAN_HPP_NAMESPACE::Image             dstImage,
                    VULKAN_HPP_NAMESPACE::ImageLayout       dstImageLayout,
                    uint32_t                                regionCount,
                    const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyImage( VULKAN_HPP_NAMESPACE::Image                               srcImage,
                    VULKAN_HPP_NAMESPACE::ImageLayout                         srcImageLayout,
                    VULKAN_HPP_NAMESPACE::Image                               dstImage,
                    VULKAN_HPP_NAMESPACE::ImageLayout                         dstImageLayout,
                    ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void blitImage( VULKAN_HPP_NAMESPACE::Image             srcImage,
                    VULKAN_HPP_NAMESPACE::ImageLayout       srcImageLayout,
                    VULKAN_HPP_NAMESPACE::Image             dstImage,
                    VULKAN_HPP_NAMESPACE::ImageLayout       dstImageLayout,
                    uint32_t                                regionCount,
                    const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
                    VULKAN_HPP_NAMESPACE::Filter            filter,
                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void blitImage( VULKAN_HPP_NAMESPACE::Image                               srcImage,
                    VULKAN_HPP_NAMESPACE::ImageLayout                         srcImageLayout,
                    VULKAN_HPP_NAMESPACE::Image                               dstImage,
                    VULKAN_HPP_NAMESPACE::ImageLayout                         dstImageLayout,
                    ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
                    VULKAN_HPP_NAMESPACE::Filter                              filter,
                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer                  srcBuffer,
                            VULKAN_HPP_NAMESPACE::Image                   dstImage,
                            VULKAN_HPP_NAMESPACE::ImageLayout             dstImageLayout,
                            uint32_t                                      regionCount,
                            const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer                                    srcBuffer,
                            VULKAN_HPP_NAMESPACE::Image                                     dstImage,
                            VULKAN_HPP_NAMESPACE::ImageLayout                               dstImageLayout,
                            ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image                   srcImage,
                            VULKAN_HPP_NAMESPACE::ImageLayout             srcImageLayout,
                            VULKAN_HPP_NAMESPACE::Buffer                  dstBuffer,
                            uint32_t                                      regionCount,
                            const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image                                     srcImage,
                            VULKAN_HPP_NAMESPACE::ImageLayout                               srcImageLayout,
                            VULKAN_HPP_NAMESPACE::Buffer                                    dstBuffer,
                            ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                       VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                       VULKAN_HPP_NAMESPACE::DeviceSize dataSize,
                       const void *                     pData,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                       VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                       ArrayProxy<const T> const &      data,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                     VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                     VULKAN_HPP_NAMESPACE::DeviceSize size,
                     uint32_t                         data,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void clearColorImage( VULKAN_HPP_NAMESPACE::Image                         image,
                          VULKAN_HPP_NAMESPACE::ImageLayout                   imageLayout,
                          const VULKAN_HPP_NAMESPACE::ClearColorValue *       pColor,
                          uint32_t                                            rangeCount,
                          const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void clearColorImage( VULKAN_HPP_NAMESPACE::Image                                           image,
                          VULKAN_HPP_NAMESPACE::ImageLayout                                     imageLayout,
                          const ClearColorValue &                                               color,
                          ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
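
    // Usage sketch (illustrative, not generated code): clearing the first mip level and array
    // layer of an assumed image that is in eTransferDstOptimal layout.
    //
    //   vk::ClearColorValue       clearColor( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } );
    //   vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
    //   commandBuffer.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal, clearColor, range );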

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image                          image,
                              VULKAN_HPP_NAMESPACE::ImageLayout                    imageLayout,
                              const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
                              uint32_t                                             rangeCount,
                              const VULKAN_HPP_NAMESPACE::ImageSubresourceRange *  pRanges,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image                                           image,
                              VULKAN_HPP_NAMESPACE::ImageLayout                                     imageLayout,
                              const ClearDepthStencilValue &                                        depthStencil,
                              ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void clearAttachments( uint32_t                                      attachmentCount,
                           const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
                           uint32_t                                      rectCount,
                           const VULKAN_HPP_NAMESPACE::ClearRect *       pRects,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const &       rects,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void resolveImage( VULKAN_HPP_NAMESPACE::Image                srcImage,
                       VULKAN_HPP_NAMESPACE::ImageLayout          srcImageLayout,
                       VULKAN_HPP_NAMESPACE::Image                dstImage,
                       VULKAN_HPP_NAMESPACE::ImageLayout          dstImageLayout,
                       uint32_t                                   regionCount,
                       const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void resolveImage( VULKAN_HPP_NAMESPACE::Image                                  srcImage,
                       VULKAN_HPP_NAMESPACE::ImageLayout                            srcImageLayout,
                       VULKAN_HPP_NAMESPACE::Image                                  dstImage,
                       VULKAN_HPP_NAMESPACE::ImageLayout                            dstImageLayout,
                       ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setEvent( VULKAN_HPP_NAMESPACE::Event              event,
                   VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void resetEvent( VULKAN_HPP_NAMESPACE::Event              event,
                     VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void waitEvents( uint32_t                                          eventCount,
                     const VULKAN_HPP_NAMESPACE::Event *               pEvents,
                     VULKAN_HPP_NAMESPACE::PipelineStageFlags          srcStageMask,
                     VULKAN_HPP_NAMESPACE::PipelineStageFlags          dstStageMask,
                     uint32_t                                          memoryBarrierCount,
                     const VULKAN_HPP_NAMESPACE::MemoryBarrier *       pMemoryBarriers,
                     uint32_t                                          bufferMemoryBarrierCount,
                     const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                     uint32_t                                          imageMemoryBarrierCount,
                     const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *  pImageMemoryBarriers,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &               events,
                     VULKAN_HPP_NAMESPACE::PipelineStageFlags                            srcStageMask,
                     VULKAN_HPP_NAMESPACE::PipelineStageFlags                            dstStageMask,
                     ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &       memoryBarriers,
                     ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
                     ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &  imageMemoryBarriers,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags          srcStageMask,
                          VULKAN_HPP_NAMESPACE::PipelineStageFlags          dstStageMask,
                          VULKAN_HPP_NAMESPACE::DependencyFlags             dependencyFlags,
                          uint32_t                                          memoryBarrierCount,
                          const VULKAN_HPP_NAMESPACE::MemoryBarrier *       pMemoryBarriers,
                          uint32_t                                          bufferMemoryBarrierCount,
                          const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                          uint32_t                                          imageMemoryBarrierCount,
                          const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *  pImageMemoryBarriers,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags                            srcStageMask,
                          VULKAN_HPP_NAMESPACE::PipelineStageFlags                            dstStageMask,
                          VULKAN_HPP_NAMESPACE::DependencyFlags                               dependencyFlags,
                          ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &       memoryBarriers,
                          ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
                          ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &  imageMemoryBarriers,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
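
    // Usage sketch (illustrative, not generated code): transitioning an assumed image from
    // eUndefined to eTransferDstOptimal with the ArrayProxy overload; the empty braces pass no
    // dependency flags and no global or buffer memory barriers.
    //
    //   vk::ImageMemoryBarrier barrier( {}, vk::AccessFlagBits::eTransferWrite,
    //                                   vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal,
    //                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
    //                                   { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
    //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
    //                                  vk::PipelineStageFlagBits::eTransfer, {}, {}, {}, barrier );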

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool         queryPool,
                     uint32_t                                query,
                     VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                   uint32_t                        query,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                         uint32_t                        firstQuery,
                         uint32_t                        queryCount,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                         VULKAN_HPP_NAMESPACE::QueryPool             queryPool,
                         uint32_t                                    query,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                               uint32_t                               firstQuery,
                               uint32_t                               queryCount,
                               VULKAN_HPP_NAMESPACE::Buffer           dstBuffer,
                               VULKAN_HPP_NAMESPACE::DeviceSize       dstOffset,
                               VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                               VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout   layout,
                        VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
                        uint32_t                               offset,
                        uint32_t                               size,
                        const void *                           pValues,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout   layout,
                        VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
                        uint32_t                               offset,
                        ArrayProxy<const T> const &            values,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
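
    // Usage sketch (illustrative, not generated code): the templated overload copies the raw bytes
    // of the given values; 'PushData' and 'pipelineLayout' are hypothetical application names.
    //
    //   struct PushData { float scale; float offset; };
    //   PushData data{ 2.0f, 0.5f };
    //   commandBuffer.pushConstants<PushData>( pipelineLayout, vk::ShaderStageFlagBits::eVertex, 0, data );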

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                          VULKAN_HPP_NAMESPACE::SubpassContents             contents,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginRenderPass( const RenderPassBeginInfo &           renderPassBegin,
                          VULKAN_HPP_NAMESPACE::SubpassContents contents,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents,
                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void executeCommands( uint32_t                                    commandBufferCount,
                          const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
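
    // Usage sketch (illustrative, not generated code): an inline render pass recorded between
    // beginRenderPass and endRenderPass; 'renderPass', 'framebuffer', 'extent' and the named
    // 'clearValues' container (e.g. a std::array<vk::ClearValue, N>) are assumed to exist.
    //
    //   vk::RenderPassBeginInfo renderPassBegin( renderPass, framebuffer,
    //                                            vk::Rect2D( { 0, 0 }, extent ), clearValues );
    //   commandBuffer.beginRenderPass( renderPassBegin, vk::SubpassContents::eInline );
    //   // ... draw calls ...
    //   commandBuffer.endRenderPass();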

    //=== VK_VERSION_1_1 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setDeviceMask( uint32_t         deviceMask,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void dispatchBase( uint32_t         baseGroupX,
                       uint32_t         baseGroupY,
                       uint32_t         baseGroupZ,
                       uint32_t         groupCountX,
                       uint32_t         groupCountY,
                       uint32_t         groupCountZ,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    //=== VK_VERSION_1_2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                            VULKAN_HPP_NAMESPACE::DeviceSize offset,
                            VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                            VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                            uint32_t                         maxDrawCount,
                            uint32_t                         stride,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                uint32_t                         maxDrawCount,
                                uint32_t                         stride,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                           const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *    pSubpassBeginInfo,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin,
                           const SubpassBeginInfo &    subpassBeginInfo,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
                       const VULKAN_HPP_NAMESPACE::SubpassEndInfo *   pSubpassEndInfo,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo,
                       const SubpassEndInfo &   subpassEndInfo,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endRenderPass2( const SubpassEndInfo & subpassEndInfo,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
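
    // Illustrative sketch (not part of the generated declarations): the enhanced-mode overloads take
    // the structs by reference; `renderPassBeginInfo` below is an assumed, fully populated RenderPassBeginInfo:
    //   commandBuffer.beginRenderPass2( renderPassBeginInfo, vk::SubpassBeginInfo( vk::SubpassContents::eInline ) );
    //   // ... record subpass contents ...
    //   commandBuffer.endRenderPass2( vk::SubpassEndInfo() );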

    //=== VK_EXT_debug_marker ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
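
    // Illustrative sketch (not part of the generated declarations), assuming the default `vk` namespace:
    //   commandBuffer.debugMarkerBeginEXT( vk::DebugMarkerMarkerInfoEXT( "shadow pass" ) );
    //   // ... record the commands belonging to the marker region ...
    //   commandBuffer.debugMarkerEndEXT();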

#if defined( VK_ENABLE_BETA_EXTENSIONS )
    //=== VK_KHR_video_queue ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
    //=== VK_KHR_video_decode_queue ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/

    //=== VK_EXT_transform_feedback ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindTransformFeedbackBuffersEXT( uint32_t                                 firstBinding,
                                          uint32_t                                 bindingCount,
                                          const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                                          const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                          const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindTransformFeedbackBuffersEXT(
      uint32_t                                                   firstBinding,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginTransformFeedbackEXT( uint32_t                                 firstCounterBuffer,
                                    uint32_t                                 counterBufferCount,
                                    const VULKAN_HPP_NAMESPACE::Buffer *     pCounterBuffers,
                                    const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginTransformFeedbackEXT( uint32_t                                                   firstCounterBuffer,
                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     counterBuffers,
                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
                                                                                               VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      endTransformFeedbackEXT( uint32_t                                 firstCounterBuffer,
                               uint32_t                                 counterBufferCount,
                               const VULKAN_HPP_NAMESPACE::Buffer *     pCounterBuffers,
                               const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endTransformFeedbackEXT( uint32_t                                                   firstCounterBuffer,
                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     counterBuffers,
                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets
                                                                                             VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool         queryPool,
                               uint32_t                                query,
                               VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                               uint32_t                                index,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                             uint32_t                        query,
                             uint32_t                        index,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      drawIndirectByteCountEXT( uint32_t                         instanceCount,
                                uint32_t                         firstInstance,
                                VULKAN_HPP_NAMESPACE::Buffer     counterBuffer,
                                VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
                                uint32_t                         counterOffset,
                                uint32_t                         vertexStride,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
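
    // Illustrative sketch (not part of the generated declarations): binding a transform feedback buffer
    // and bracketing the capture, assuming an existing `xfbBuffer` handle; the ArrayProxy overloads accept
    // single elements, initializer lists or nullptr:
    //   commandBuffer.bindTransformFeedbackBuffersEXT( 0, xfbBuffer, { 0 } );
    //   commandBuffer.beginTransformFeedbackEXT( 0, nullptr );
    //   // ... draw ...
    //   commandBuffer.endTransformFeedbackEXT( 0, nullptr );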

    //=== VK_NVX_binary_import ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_AMD_draw_indirect_count ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                               VULKAN_HPP_NAMESPACE::DeviceSize offset,
                               VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                               VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                               uint32_t                         maxDrawCount,
                               uint32_t                         stride,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                      VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                      uint32_t                         maxDrawCount,
                                      uint32_t                         stride,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    //=== VK_KHR_device_group ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setDeviceMaskKHR( uint32_t         deviceMask,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void dispatchBaseKHR( uint32_t         baseGroupX,
                          uint32_t         baseGroupY,
                          uint32_t         baseGroupZ,
                          uint32_t         groupCountX,
                          uint32_t         groupCountY,
                          uint32_t         groupCountZ,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    //=== VK_KHR_push_descriptor ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint          pipelineBindPoint,
                               VULKAN_HPP_NAMESPACE::PipelineLayout             layout,
                               uint32_t                                         set,
                               uint32_t                                         descriptorWriteCount,
                               const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint                            pipelineBindPoint,
                               VULKAN_HPP_NAMESPACE::PipelineLayout                               layout,
                               uint32_t                                                           set,
                               ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
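
    // Illustrative sketch (not part of the generated declarations): pushing a single uniform-buffer write,
    // assuming a `uniformBuffer` handle and a `pipelineLayout` whose set 0 was created with the
    // push-descriptor layout flag:
    //   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
    //   vk::WriteDescriptorSet   write( {}, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
    //   commandBuffer.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, write );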

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                           VULKAN_HPP_NAMESPACE::PipelineLayout           layout,
                                           uint32_t                                       set,
                                           const void *                                   pData,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    //=== VK_EXT_conditional_rendering ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginConditionalRenderingEXT(
      const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
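
    // Illustrative sketch (not part of the generated declarations), assuming a `predicateBuffer` handle
    // holding the 32-bit predicate at offset 0:
    //   commandBuffer.beginConditionalRenderingEXT( vk::ConditionalRenderingBeginInfoEXT( predicateBuffer, 0 ) );
    //   // ... conditionally executed commands ...
    //   commandBuffer.endConditionalRenderingEXT();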

    //=== VK_NV_clip_space_w_scaling ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setViewportWScalingNV( uint32_t                                         firstViewport,
                                uint32_t                                         viewportCount,
                                const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setViewportWScalingNV( uint32_t                                                           firstViewport,
                                ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_EXT_discard_rectangles ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setDiscardRectangleEXT( uint32_t                             firstDiscardRectangle,
                              uint32_t                             discardRectangleCount,
                              const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setDiscardRectangleEXT( uint32_t                                               firstDiscardRectangle,
                              ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_create_renderpass2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                              const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *    pSubpassBeginInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin,
                              const SubpassBeginInfo &    subpassBeginInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
                          const VULKAN_HPP_NAMESPACE::SubpassEndInfo *   pSubpassEndInfo,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo,
                          const SubpassEndInfo &   subpassEndInfo,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_EXT_debug_utils ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_EXT_sample_locations ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_acceleration_structure ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void buildAccelerationStructuresKHR(
      uint32_t                                                                     infoCount,
      const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *      pInfos,
      const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void buildAccelerationStructuresKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &      infos,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
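
    // Illustrative sketch (not part of the generated declarations): the enhanced overload expects one
    // build-range pointer per geometry info; `buildGeometryInfo` below is an assumed, fully populated
    // AccelerationStructureBuildGeometryInfoKHR (dst structure, geometries and scratch address set):
    //   vk::AccelerationStructureBuildRangeInfoKHR         rangeInfo( primitiveCount, 0, 0, 0 );
    //   const vk::AccelerationStructureBuildRangeInfoKHR * pRangeInfo = &rangeInfo;
    //   commandBuffer.buildAccelerationStructuresKHR( buildGeometryInfo, pRangeInfo );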

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void buildAccelerationStructuresIndirectKHR(
      uint32_t                                                                infoCount,
      const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
      const VULKAN_HPP_NAMESPACE::DeviceAddress *                             pIndirectDeviceAddresses,
      const uint32_t *                                                        pIndirectStrides,
      const uint32_t * const *                                                ppMaxPrimitiveCounts,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void buildAccelerationStructuresIndirectKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const &                             indirectDeviceAddresses,
      ArrayProxy<const uint32_t> const &                                                        indirectStrides,
      ArrayProxy<const uint32_t * const> const &                                                pMaxPrimitiveCounts,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyAccelerationStructureToMemoryKHR(
      const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyMemoryToAccelerationStructureKHR(
      const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void writeAccelerationStructuresPropertiesKHR(
      uint32_t                                               accelerationStructureCount,
      const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                        queryType,
      VULKAN_HPP_NAMESPACE::QueryPool                        queryPool,
      uint32_t                                               firstQuery,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void writeAccelerationStructuresPropertiesKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                                          queryType,
      VULKAN_HPP_NAMESPACE::QueryPool                                          queryPool,
      uint32_t                                                                 firstQuery,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_NV_shading_rate_image ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView   imageView,
                              VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setViewportShadingRatePaletteNV( uint32_t                                           firstViewport,
                                          uint32_t                                           viewportCount,
                                          const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setViewportShadingRatePaletteNV(
      uint32_t                                                             firstViewport,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV           sampleOrderType,
                              uint32_t                                                customSampleOrderCount,
                              const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setCoarseSampleOrderNV(
      VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV                             sampleOrderType,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_NV_ray_tracing ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
                                       VULKAN_HPP_NAMESPACE::Buffer                              instanceData,
                                       VULKAN_HPP_NAMESPACE::DeviceSize                          instanceOffset,
                                       VULKAN_HPP_NAMESPACE::Bool32                              update,
                                       VULKAN_HPP_NAMESPACE::AccelerationStructureNV             dst,
                                       VULKAN_HPP_NAMESPACE::AccelerationStructureNV             src,
                                       VULKAN_HPP_NAMESPACE::Buffer                              scratch,
                                       VULKAN_HPP_NAMESPACE::DeviceSize                          scratchOffset,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void buildAccelerationStructureNV( const AccelerationStructureInfoNV &           info,
                                       VULKAN_HPP_NAMESPACE::Buffer                  instanceData,
                                       VULKAN_HPP_NAMESPACE::DeviceSize              instanceOffset,
                                       VULKAN_HPP_NAMESPACE::Bool32                  update,
                                       VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
                                       VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
                                       VULKAN_HPP_NAMESPACE::Buffer                  scratch,
                                       VULKAN_HPP_NAMESPACE::DeviceSize              scratchOffset,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV          dst,
                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV          src,
                                      VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer     raygenShaderBindingTableBuffer,
                      VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
                      VULKAN_HPP_NAMESPACE::Buffer     missShaderBindingTableBuffer,
                      VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
                      VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
                      VULKAN_HPP_NAMESPACE::Buffer     hitShaderBindingTableBuffer,
                      VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
                      VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
                      VULKAN_HPP_NAMESPACE::Buffer     callableShaderBindingTableBuffer,
                      VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
                      VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
                      uint32_t                         width,
                      uint32_t                         height,
                      uint32_t                         depth,
                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void writeAccelerationStructuresPropertiesNV(
      uint32_t                                              accelerationStructureCount,
      const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                       queryType,
      VULKAN_HPP_NAMESPACE::QueryPool                       queryPool,
      uint32_t                                              firstQuery,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void writeAccelerationStructuresPropertiesNV(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                                         queryType,
      VULKAN_HPP_NAMESPACE::QueryPool                                         queryPool,
      uint32_t                                                                firstQuery,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_draw_indirect_count ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                               VULKAN_HPP_NAMESPACE::DeviceSize offset,
                               VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                               VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                               uint32_t                         maxDrawCount,
                               uint32_t                         stride,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                      VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                      uint32_t                         maxDrawCount,
                                      uint32_t                         stride,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    //=== VK_AMD_buffer_marker ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                               VULKAN_HPP_NAMESPACE::Buffer                dstBuffer,
                               VULKAN_HPP_NAMESPACE::DeviceSize            dstOffset,
                               uint32_t                                    marker,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    //=== VK_NV_mesh_shader ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawMeshTasksNV( uint32_t         taskCount,
                          uint32_t         firstTask,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                               VULKAN_HPP_NAMESPACE::DeviceSize offset,
                               uint32_t                         drawCount,
                               uint32_t                         stride,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                       VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                       VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                       VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                       uint32_t                         maxDrawCount,
                                       uint32_t                         stride,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    //=== VK_NV_scissor_exclusive ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setExclusiveScissorNV( uint32_t                             firstExclusiveScissor,
                                uint32_t                             exclusiveScissorCount,
                                const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setExclusiveScissorNV( uint32_t                                               firstExclusiveScissor,
                                ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_NV_device_diagnostic_checkpoints ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setCheckpointNV( const void *     pCheckpointMarker,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    //=== VK_INTEL_performance_query ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL(
      const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL(
      const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL(
      const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
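
    // Illustrative sketch (not part of the generated declarations): unlike most command buffer calls these
    // return a Result; in enhanced mode the struct-reference overloads return void and signal failure by
    // throwing a vk::SystemError instead:
    //   commandBuffer.setPerformanceMarkerINTEL( vk::PerformanceMarkerInfoINTEL( 0x1 ) );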

    //=== VK_KHR_fragment_shading_rate ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D *                       pFragmentSize,
                                    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setFragmentShadingRateKHR( const Extent2D &                                             fragmentSize,
                                    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
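
    // Illustrative sketch (not part of the generated declarations): both overloads take the two combiner
    // ops as a C-style array of size 2:
    //   vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
    //                                                           vk::FragmentShadingRateCombinerOpKHR::eKeep };
    //   commandBuffer.setFragmentShadingRateKHR( vk::Extent2D( 2, 2 ), combinerOps );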

    //=== VK_EXT_line_rasterization ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setLineStippleEXT( uint32_t         lineStippleFactor,
                            uint16_t         lineStipplePattern,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    //=== VK_EXT_extended_dynamic_state ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setViewportWithCountEXT( uint32_t                               viewportCount,
                               const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setScissorWithCountEXT( uint32_t                             scissorCount,
                              const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindVertexBuffers2EXT( uint32_t                                 firstBinding,
                                uint32_t                                 bindingCount,
                                const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                                const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindVertexBuffers2EXT(
      uint32_t                                                   firstBinding,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                          VULKAN_HPP_NAMESPACE::StencilOp        failOp,
                          VULKAN_HPP_NAMESPACE::StencilOp        passOp,
                          VULKAN_HPP_NAMESPACE::StencilOp        depthFailOp,
                          VULKAN_HPP_NAMESPACE::CompareOp        compareOp,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
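
    // Illustrative sketch (not part of the generated declarations): these calls override state of a
    // pipeline created with the corresponding dynamic states enabled, e.g.
    //   commandBuffer.setCullModeEXT( vk::CullModeFlagBits::eBack );
    //   commandBuffer.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
    //   commandBuffer.setDepthTestEnableEXT( VK_TRUE );
    //   commandBuffer.setDepthCompareOpEXT( vk::CompareOp::eLessOrEqual );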

    //=== VK_NV_device_generated_commands ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32                          isPreprocessed,
                                     const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32    isPreprocessed,
                                     const GeneratedCommandsInfoNV & generatedCommandsInfo,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                    VULKAN_HPP_NAMESPACE::Pipeline          pipeline,
                                    uint32_t                                groupIndex,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

#if defined( VK_ENABLE_BETA_EXTENSIONS )
    //=== VK_KHR_video_encode_queue ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/

    //=== VK_KHR_synchronization2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event                     event,
                       const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
                       const DependencyInfoKHR &   dependencyInfo,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event                  event,
                         VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void waitEvents2KHR( uint32_t                                        eventCount,
                         const VULKAN_HPP_NAMESPACE::Event *             pEvents,
                         const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &             events,
                         ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
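
    // Illustrative sketch (not part of the generated declarations): a coarse global barrier using the
    // synchronization2 structs; in enhanced mode DependencyInfoKHR accepts ArrayProxy-style barrier lists:
    //   vk::MemoryBarrier2KHR memoryBarrier( vk::PipelineStageFlagBits2KHR::eAllCommands,
    //                                        vk::AccessFlagBits2KHR::eMemoryWrite,
    //                                        vk::PipelineStageFlagBits2KHR::eAllCommands,
    //                                        vk::AccessFlagBits2KHR::eMemoryRead );
    //   commandBuffer.pipelineBarrier2KHR( vk::DependencyInfoKHR( {}, memoryBarrier ) );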

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
                             VULKAN_HPP_NAMESPACE::QueryPool              queryPool,
                             uint32_t                                     query,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
                                VULKAN_HPP_NAMESPACE::Buffer                 dstBuffer,
                                VULKAN_HPP_NAMESPACE::DeviceSize             dstOffset,
                                uint32_t                                     marker,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    //=== VK_NV_fragment_shading_rate_enums ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV                  shadingRate,
                                       const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    //=== VK_KHR_copy_commands2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
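
    // Usage sketch for the VK_KHR_copy_commands2 overloads above (illustrative only; it assumes
    // the CopyBufferInfo2KHR / BufferCopy2KHR wrappers defined elsewhere in this header and
    // hypothetical handles `cb`, `srcBuffer`, `dstBuffer`):
    //
    //   vk::BufferCopy2KHR     region( /*srcOffset=*/0, /*dstOffset=*/0, /*size=*/1024 );
    //   vk::CopyBufferInfo2KHR copyInfo( srcBuffer, dstBuffer, region );  // ArrayProxy from one element
    //   cb.copyBuffer2KHR( copyInfo );                                    // enhanced-mode reference overload
    //   cb.copyBuffer2KHR( &copyInfo );                                   // pointer overload, same effect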

    //=== VK_KHR_ray_tracing_pipeline ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
                       const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
                       const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
                       const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
                       uint32_t                                                    width,
                       uint32_t                                                    height,
                       uint32_t                                                    depth,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                       const StridedDeviceAddressRegionKHR & missShaderBindingTable,
                       const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                       const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                       uint32_t                              width,
                       uint32_t                              height,
                       uint32_t                              depth,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
                               const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
                               const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
                               const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
                               VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                               const StridedDeviceAddressRegionKHR & missShaderBindingTable,
                               const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                               const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                               VULKAN_HPP_NAMESPACE::DeviceAddress   indirectDeviceAddress,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setRayTracingPipelineStackSizeKHR( uint32_t         pipelineStackSize,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
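
    // Usage sketch for the VK_KHR_ray_tracing_pipeline dispatch commands above (illustrative
    // only; `cb` is a recording command buffer with a ray tracing pipeline bound, and the four
    // StridedDeviceAddressRegionKHR shader-binding-table regions are prepared elsewhere):
    //
    //   cb.traceRaysKHR( raygenSbt, missSbt, hitSbt, callableSbt, width, height, /*depth=*/1 );
    //
    //   // The indirect variant reads width/height/depth from a TraceRaysIndirectCommandKHR
    //   // located at indirectDeviceAddress instead of taking them as parameters:
    //   cb.traceRaysIndirectKHR( raygenSbt, missSbt, hitSbt, callableSbt, indirectDeviceAddress );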

    //=== VK_EXT_vertex_input_dynamic_state ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setVertexInputEXT( uint32_t                                                        vertexBindingDescriptionCount,
                         const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
                         uint32_t vertexAttributeDescriptionCount,
                         const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setVertexInputEXT(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const &   vertexBindingDescriptions,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
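
    // Usage sketch for setVertexInputEXT above (illustrative only; the binding/attribute
    // description wrappers are defined elsewhere in this header, and the placeholder braces
    // stand for whatever vertex layout the application uses):
    //
    //   std::array<vk::VertexInputBindingDescription2EXT, 1>   bindings   = { /* ... */ };
    //   std::array<vk::VertexInputAttributeDescription2EXT, 2> attributes = { /* ... */ };
    //   cb.setVertexInputEXT( bindings, attributes );   // ArrayProxy derives count + data pointer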

    //=== VK_EXT_extended_dynamic_state2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setPatchControlPointsEXT( uint32_t         patchControlPoints,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    //=== VK_EXT_color_write_enable ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setColorWriteEnableEXT( uint32_t                             attachmentCount,
                              const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
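
    // Usage sketch for setColorWriteEnableEXT above (illustrative only): the ArrayProxy
    // overload also accepts an initializer list, one vk::Bool32 per bound color attachment:
    //
    //   cb.setColorWriteEnableEXT( { VK_TRUE, VK_TRUE, VK_FALSE } );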

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
    {
      return m_commandBuffer;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_commandBuffer != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_commandBuffer == VK_NULL_HANDLE;
    }

  private:
    VkCommandBuffer m_commandBuffer = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eCommandBuffer>
  {
    using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
  {
    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
  {
    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct CommandBufferSubmitInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfoKHR( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {},
                                                     uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : commandBuffer( commandBuffer_ )
      , deviceMask( deviceMask_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CommandBufferSubmitInfoKHR( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferSubmitInfoKHR( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandBufferSubmitInfoKHR( *reinterpret_cast<CommandBufferSubmitInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfoKHR &
                            operator=( CommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandBufferSubmitInfoKHR & operator=( VkCommandBufferSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR const *>( &rhs );
      return *this;
    }

    CommandBufferSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CommandBufferSubmitInfoKHR &
      setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBuffer = commandBuffer_;
      return *this;
    }

    CommandBufferSubmitInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }

    operator VkCommandBufferSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandBufferSubmitInfoKHR *>( this );
    }

    operator VkCommandBufferSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandBufferSubmitInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandBufferSubmitInfoKHR const & ) const = default;
#else
    bool operator==( CommandBufferSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) &&
             ( deviceMask == rhs.deviceMask );
    }

    bool operator!=( CommandBufferSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eCommandBufferSubmitInfoKHR;
    const void *                        pNext         = {};
    VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {};
    uint32_t                            deviceMask    = {};
  };
  static_assert( sizeof( CommandBufferSubmitInfoKHR ) == sizeof( VkCommandBufferSubmitInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CommandBufferSubmitInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandBufferSubmitInfoKHR>
  {
    using Type = CommandBufferSubmitInfoKHR;
  };
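
  // Usage sketch for CommandBufferSubmitInfoKHR above (illustrative only, with struct
  // constructors enabled; `cb` is a hypothetical recorded command buffer). The setters
  // chain, so the struct can be built in one expression before it is referenced from a
  // SubmitInfo2KHR, which is defined elsewhere in this header:
  //
  //   auto cbSubmitInfo = vk::CommandBufferSubmitInfoKHR()
  //                         .setCommandBuffer( cb )
  //                         .setDeviceMask( 0 );   // 0 selects all devices in the device group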

  struct CommandPoolCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {},
                                                uint32_t queueFamilyIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , queueFamilyIndex( queueFamilyIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo &
                            operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
      return *this;
    }

    CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCommandPoolCreateInfo *>( this );
    }

    operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCommandPoolCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CommandPoolCreateInfo const & ) const = default;
#else
    bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( queueFamilyIndex == rhs.queueFamilyIndex );
    }

    bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType            = StructureType::eCommandPoolCreateInfo;
    const void *                                 pNext            = {};
    VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags            = {};
    uint32_t                                     queueFamilyIndex = {};
  };
  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
  {
    using Type = CommandPoolCreateInfo;
  };
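
  // Usage sketch for CommandPoolCreateInfo above (illustrative only, with struct constructors
  // enabled; `graphicsQueueFamilyIndex` is a hypothetical value queried elsewhere). The result
  // is typically passed to Device::createCommandPool, defined elsewhere in this header:
  //
  //   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
  //                                       graphicsQueueFamilyIndex );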

  class ShaderModule
  {
  public:
    using CType = VkShaderModule;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;

  public:
    VULKAN_HPP_CONSTEXPR         ShaderModule() = default;
    VULKAN_HPP_CONSTEXPR         ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
      : m_shaderModule( shaderModule )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
    {
      m_shaderModule = shaderModule;
      return *this;
    }
#endif

    ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_shaderModule = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ShaderModule const & ) const = default;
#else
    bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_shaderModule == rhs.m_shaderModule;
    }

    bool operator!=( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_shaderModule != rhs.m_shaderModule;
    }

    bool operator<( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_shaderModule < rhs.m_shaderModule;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
    {
      return m_shaderModule;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_shaderModule != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_shaderModule == VK_NULL_HANDLE;
    }

  private:
    VkShaderModule m_shaderModule = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eShaderModule>
  {
    using type = VULKAN_HPP_NAMESPACE::ShaderModule;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
  {
    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
  {
    using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct SpecializationMapEntry
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
      : constantID( constantID_ )
      , offset( offset_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
      : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry &
                            operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
      return *this;
    }

    SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
    {
      constantID = constantID_;
      return *this;
    }

    SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSpecializationMapEntry *>( this );
    }

    operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSpecializationMapEntry *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SpecializationMapEntry const & ) const = default;
#else
    bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size );
    }

    bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t constantID = {};
    uint32_t offset     = {};
    size_t   size       = {};
  };
  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );

  struct SpecializationInfo
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t                                             mapEntryCount_ = {},
                                             const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_   = {},
                                             size_t                                               dataSize_      = {},
                                             const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
      : mapEntryCount( mapEntryCount_ )
      , pMapEntries( pMapEntries_ )
      , dataSize( dataSize_ )
      , pData( pData_ )
    {}

    VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
                          const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const &  mapEntries_,
                        VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
      : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) )
      , pMapEntries( mapEntries_.data() )
      , dataSize( data_.size() * sizeof( T ) )
      , pData( data_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SpecializationInfo &
                            operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
      return *this;
    }

    SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      mapEntryCount = mapEntryCount_;
      return *this;
    }

    SpecializationInfo &
      setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pMapEntries = pMapEntries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SpecializationInfo & setMapEntries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const &
        mapEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
      pMapEntries   = mapEntries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    SpecializationInfo &
      setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = data_.size() * sizeof( T );
      pData    = data_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSpecializationInfo *>( this );
    }

    operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSpecializationInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SpecializationInfo const & ) const = default;
#else
    bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) &&
             ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
    }

    bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                             mapEntryCount = {};
    const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries   = {};
    size_t                                               dataSize      = {};
    const void *                                         pData         = {};
  };
  static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
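
  // Usage sketch for SpecializationMapEntry / SpecializationInfo above (illustrative only,
  // enhanced mode with struct constructors enabled). The ArrayProxyNoTemporaries constructor
  // derives mapEntryCount, pMapEntries, dataSize and pData from named lvalues:
  //
  //   struct SpecData { uint32_t workGroupSize; } specData{ 64 };
  //   std::array<vk::SpecializationMapEntry, 1> entries = {
  //     vk::SpecializationMapEntry( /*constantID=*/0, /*offset=*/0, sizeof( uint32_t ) ) };
  //   vk::ArrayProxyNoTemporaries<const SpecData> data( specData );
  //   vk::SpecializationInfo specInfo( entries, data );   // fills all four members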

  struct PipelineShaderStageCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_  = {},
      VULKAN_HPP_NAMESPACE::ShaderStageFlagBits            stage_  = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
      VULKAN_HPP_NAMESPACE::ShaderModule                   module_ = {},
      const char *                                         pName_  = {},
      const VULKAN_HPP_NAMESPACE::SpecializationInfo *     pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , stage( stage_ )
      , module( module_ )
      , pName( pName_ )
      , pSpecializationInfo( pSpecializationInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo &
                            operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineShaderStageCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
    {
      stage = stage_;
      return *this;
    }

    PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
    {
      module = module_;
      return *this;
    }

    PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
    {
      pName = pName_;
      return *this;
    }

    PipelineShaderStageCreateInfo & setPSpecializationInfo(
      const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pSpecializationInfo = pSpecializationInfo_;
      return *this;
    }

    operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo *>( this );
    }

    operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineShaderStageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineShaderStageCreateInfo const & ) const = default;
#else
    bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) &&
             ( module == rhs.module ) && ( pName == rhs.pName ) && ( pSpecializationInfo == rhs.pSpecializationInfo );
    }

    bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType  = StructureType::ePipelineShaderStageCreateInfo;
    const void *                                         pNext  = {};
    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags  = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlagBits            stage  = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
    VULKAN_HPP_NAMESPACE::ShaderModule                   module = {};
    const char *                                         pName  = {};
    const VULKAN_HPP_NAMESPACE::SpecializationInfo *     pSpecializationInfo = {};
  };
  static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
  {
    using Type = PipelineShaderStageCreateInfo;
  };
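
  // Usage sketch for PipelineShaderStageCreateInfo above (illustrative only; `computeShader`
  // is a hypothetical vk::ShaderModule created elsewhere, and `specInfo` is the specialization
  // info from the sketch above):
  //
  //   vk::PipelineShaderStageCreateInfo stageInfo( {},   // flags
  //                                                vk::ShaderStageFlagBits::eCompute,
  //                                                computeShader,
  //                                                "main",   // SPIR-V entry point name
  //                                                &specInfo );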

  struct ComputePipelineCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags           flags_  = {},
                                                    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_  = {},
                                                    VULKAN_HPP_NAMESPACE::PipelineLayout                layout_ = {},
                                                    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_          = {},
                                                    int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , stage( stage_ )
      , layout( layout_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo &
                            operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
      return *this;
    }

    ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ComputePipelineCreateInfo &
      setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
    {
      stage = stage_;
      return *this;
    }

    ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    ComputePipelineCreateInfo &
      setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineHandle = basePipelineHandle_;
      return *this;
    }

    ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineIndex = basePipelineIndex_;
      return *this;
    }

    operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkComputePipelineCreateInfo *>( this );
    }

    operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkComputePipelineCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ComputePipelineCreateInfo const & ) const = default;
#else
    bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) &&
             ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
             ( basePipelineIndex == rhs.basePipelineIndex );
    }

    bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType              = StructureType::eComputePipelineCreateInfo;
    const void *                                        pNext              = {};
    VULKAN_HPP_NAMESPACE::PipelineCreateFlags           flags              = {};
    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage              = {};
    VULKAN_HPP_NAMESPACE::PipelineLayout                layout             = {};
    VULKAN_HPP_NAMESPACE::Pipeline                      basePipelineHandle = {};
    int32_t                                             basePipelineIndex  = {};
  };
  static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
  {
    using Type = ComputePipelineCreateInfo;
  };
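
  // Usage sketch for ComputePipelineCreateInfo above (illustrative only, reusing `stageInfo`
  // from the previous sketch and assuming a hypothetical vk::PipelineLayout `pipelineLayout`).
  // basePipelineHandle and basePipelineIndex are left at their defaults because no derivative
  // pipeline is requested; the struct is then passed to Device::createComputePipelines,
  // defined elsewhere in this header:
  //
  //   vk::ComputePipelineCreateInfo pipelineInfo( {},   // flags
  //                                               stageInfo,
  //                                               pipelineLayout );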

  struct ConformanceVersion
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_    = {},
                                             uint8_t minor_    = {},
                                             uint8_t subminor_ = {},
                                             uint8_t patch_    = {} ) VULKAN_HPP_NOEXCEPT
      : major( major_ )
      , minor( minor_ )
      , subminor( subminor_ )
      , patch( patch_ )
    {}

    VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
      : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ConformanceVersion &
                            operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
      return *this;
    }

    ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
    {
      major = major_;
      return *this;
    }

    ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
    {
      minor = minor_;
      return *this;
    }

    ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
    {
      subminor = subminor_;
      return *this;
    }

    ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
    {
      patch = patch_;
      return *this;
    }

    operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkConformanceVersion *>( this );
    }

    operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkConformanceVersion *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ConformanceVersion const & ) const = default;
#else
    bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch );
    }

    bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint8_t major    = {};
    uint8_t minor    = {};
    uint8_t subminor = {};
    uint8_t patch    = {};
  };
  static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
  using ConformanceVersionKHR = ConformanceVersion;

  struct CooperativeMatrixPropertiesNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(
      uint32_t                              MSize_ = {},
      uint32_t                              NSize_ = {},
      uint32_t                              KSize_ = {},
      VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
      VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
      VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
      VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
      VULKAN_HPP_NAMESPACE::ScopeNV         scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT
      : MSize( MSize_ )
      , NSize( NSize_ )
      , KSize( KSize_ )
      , AType( AType_ )
      , BType( BType_ )
      , CType( CType_ )
      , DType( DType_ )
      , scope( scope_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV &
                            operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
      return *this;
    }

    CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
    {
      MSize = MSize_;
      return *this;
    }

    CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
    {
      NSize = NSize_;
      return *this;
    }

    CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
    {
      KSize = KSize_;
      return *this;
    }

    CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
    {
      AType = AType_;
      return *this;
    }

    CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
    {
      BType = BType_;
      return *this;
    }

    CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
    {
      CType = CType_;
      return *this;
    }

    CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
    {
      DType = DType_;
      return *this;
    }

    CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
    {
      scope = scope_;
      return *this;
    }

    operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV *>( this );
    }

    operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default;
#else
    bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) &&
             ( KSize == rhs.KSize ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) &&
             ( DType == rhs.DType ) && ( scope == rhs.scope );
    }

    bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType = StructureType::eCooperativeMatrixPropertiesNV;
    void *                                pNext = {};
    uint32_t                              MSize = {};
    uint32_t                              NSize = {};
    uint32_t                              KSize = {};
    VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
    VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
    VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
    VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
    VULKAN_HPP_NAMESPACE::ScopeNV         scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
  };
  static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
  {
    using Type = CooperativeMatrixPropertiesNV;
  };

  struct CopyCommandTransformInfoQCOM
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ =
                                      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT
      : transform( transform_ )
    {}

    VULKAN_HPP_CONSTEXPR
      CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM &
                            operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
      return *this;
    }

    CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CopyCommandTransformInfoQCOM &
      setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM *>( this );
    }

    operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyCommandTransformInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default;
#else
    bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform );
    }

    bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::eCopyCommandTransformInfoQCOM;
    const void *                                      pNext = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform =
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
  };
  static_assert( sizeof( CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CopyCommandTransformInfoQCOM>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
  {
    using Type = CopyCommandTransformInfoQCOM;
  };

  struct CopyDescriptorSet
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_          = {},
                                            uint32_t                            srcBinding_      = {},
                                            uint32_t                            srcArrayElement_ = {},
                                            VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_          = {},
                                            uint32_t                            dstBinding_      = {},
                                            uint32_t                            dstArrayElement_ = {},
                                            uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSet( srcSet_ )
      , srcBinding( srcBinding_ )
      , srcArrayElement( srcArrayElement_ )
      , dstSet( dstSet_ )
      , dstBinding( dstBinding_ )
      , dstArrayElement( dstArrayElement_ )
      , descriptorCount( descriptorCount_ )
    {}

    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
      : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet &
                            operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
      return *this;
    }

    CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSet = srcSet_;
      return *this;
    }

    CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      srcBinding = srcBinding_;
      return *this;
    }

    CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
    {
      srcArrayElement = srcArrayElement_;
      return *this;
    }

    CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSet = dstSet_;
      return *this;
    }

    CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBinding = dstBinding_;
      return *this;
    }

    CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
    {
      dstArrayElement = dstArrayElement_;
      return *this;
    }

    CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }

    operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCopyDescriptorSet *>( this );
    }

    operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCopyDescriptorSet *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CopyDescriptorSet const & ) const = default;
#else
    bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) &&
             ( srcBinding == rhs.srcBinding ) && ( srcArrayElement == rhs.srcArrayElement ) &&
             ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) &&
             ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount );
    }

    bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eCopyDescriptorSet;
    const void *                        pNext           = {};
    VULKAN_HPP_NAMESPACE::DescriptorSet srcSet          = {};
    uint32_t                            srcBinding      = {};
    uint32_t                            srcArrayElement = {};
    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet          = {};
    uint32_t                            dstBinding      = {};
    uint32_t                            dstArrayElement = {};
    uint32_t                            descriptorCount = {};
  };
  static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCopyDescriptorSet>
  {
    using Type = CopyDescriptorSet;
  };
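
  // Usage sketch for CopyDescriptorSet above (illustrative only; `srcSet` and `dstSet` are
  // hypothetical descriptor sets allocated elsewhere). It copies descriptorCount descriptors
  // starting at the given binding/array element of srcSet into dstSet when passed to
  // Device::updateDescriptorSets, defined elsewhere in this header:
  //
  //   auto copy = vk::CopyDescriptorSet()
  //                 .setSrcSet( srcSet ).setSrcBinding( 0 ).setSrcArrayElement( 0 )
  //                 .setDstSet( dstSet ).setDstBinding( 0 ).setDstArrayElement( 0 )
  //                 .setDescriptorCount( 1 );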

  class CuModuleNVX
  {
  public:
    using CType = VkCuModuleNVX;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX;

  public:
    VULKAN_HPP_CONSTEXPR         CuModuleNVX() = default;
    VULKAN_HPP_CONSTEXPR         CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT
      : m_cuModuleNVX( cuModuleNVX )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    CuModuleNVX & operator=( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT
    {
      m_cuModuleNVX = cuModuleNVX;
      return *this;
    }
#endif

    CuModuleNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_cuModuleNVX = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CuModuleNVX const & ) const = default;
#else
    bool operator==( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_cuModuleNVX == rhs.m_cuModuleNVX;
    }

    bool operator!=( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_cuModuleNVX != rhs.m_cuModuleNVX;
    }

    bool operator<( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_cuModuleNVX < rhs.m_cuModuleNVX;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT
    {
      return m_cuModuleNVX;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_cuModuleNVX != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_cuModuleNVX == VK_NULL_HANDLE;
    }

  private:
    VkCuModuleNVX m_cuModuleNVX = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eCuModuleNVX>
  {
    using type = VULKAN_HPP_NAMESPACE::CuModuleNVX;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX>
  {
    using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX>
  {
    using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CuModuleNVX>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct CuFunctionCreateInfoNVX
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {},
                                                  const char *                      pName_  = {} ) VULKAN_HPP_NOEXCEPT
      : module( module_ )
      , pName( pName_ )
    {}

    VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : CuFunctionCreateInfoNVX( *reinterpret_cast<CuFunctionCreateInfoNVX const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX &
                            operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const *>( &rhs );
      return *this;
    }

    CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT
    {
      module = module_;
      return *this;
    }

    CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT
    {
      pName = pName_;
      return *this;
    }

    operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( this );
    }

    operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCuFunctionCreateInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CuFunctionCreateInfoNVX const & ) const = default;
#else
    bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( pName == rhs.pName );
    }

    bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eCuFunctionCreateInfoNVX;
    const void *                        pNext  = {};
    VULKAN_HPP_NAMESPACE::CuModuleNVX   module = {};
    const char *                        pName  = {};
  };
  static_assert( sizeof( CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CuFunctionCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCuFunctionCreateInfoNVX>
  {
    using Type = CuFunctionCreateInfoNVX;
  };

  struct CuModuleCreateInfoNVX
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
      : dataSize( dataSize_ )
      , pData( pData_ )
    {}

    VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : CuModuleCreateInfoNVX( *reinterpret_cast<CuModuleCreateInfoNVX const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX &
                            operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const *>( &rhs );
      return *this;
    }

    CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

    operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkCuModuleCreateInfoNVX *>( this );
    }

    operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkCuModuleCreateInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( CuModuleCreateInfoNVX const & ) const = default;
#else
    bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
    }

    bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eCuModuleCreateInfoNVX;
    const void *                        pNext    = {};
    size_t                              dataSize = {};
    const void *                        pData    = {};
  };
  static_assert( sizeof( CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<CuModuleCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eCuModuleCreateInfoNVX>
  {
    using Type = CuModuleCreateInfoNVX;
  };
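
  // Usage sketch for VK_NVX_binary_import (editorial comment, not generated code): a CuModuleNVX
  // is created from an opaque binary blob, and a CuFunctionNVX is then looked up in it by name.
  // "device", "blob" (a std::vector<uint8_t>) and the kernel name are assumptions.
  //
  //   vk::CuModuleNVX cuModule = device.createCuModuleNVX(
  //     vk::CuModuleCreateInfoNVX().setDataSize( blob.size() ).setPData( blob.data() ) );
  //   vk::CuFunctionNVX cuFunction = device.createCuFunctionNVX(
  //     vk::CuFunctionCreateInfoNVX().setModule( cuModule ).setPName( "myKernel" ) );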

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct D3D12FenceSubmitInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t         waitSemaphoreValuesCount_   = {},
                                                  const uint64_t * pWaitSemaphoreValues_       = {},
                                                  uint32_t         signalSemaphoreValuesCount_ = {},
                                                  const uint64_t * pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT
      : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
      , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
      , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
      , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
    {}

    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    D3D12FenceSubmitInfoKHR(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
      : waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
      , pWaitSemaphoreValues( waitSemaphoreValues_.data() )
      , signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
      , pSignalSemaphoreValues( signalSemaphoreValues_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR &
                            operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
      return *this;
    }

    D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
      return *this;
    }

    D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphoreValues = pWaitSemaphoreValues_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
      pWaitSemaphoreValues     = waitSemaphoreValues_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
      return *this;
    }

    D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphoreValues = pSignalSemaphoreValues_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
      pSignalSemaphoreValues     = signalSemaphoreValues_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR *>( this );
    }

    operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default;
#  else
    bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) &&
             ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) &&
             ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) &&
             ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
    }

    bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                      = StructureType::eD3D12FenceSubmitInfoKHR;
    const void *                        pNext                      = {};
    uint32_t                            waitSemaphoreValuesCount   = {};
    const uint64_t *                    pWaitSemaphoreValues       = {};
    uint32_t                            signalSemaphoreValuesCount = {};
    const uint64_t *                    pSignalSemaphoreValues     = {};
  };
  static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
  {
    using Type = D3D12FenceSubmitInfoKHR;
  };
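
  // Sketch (editorial comment only): D3D12FenceSubmitInfoKHR extends SubmitInfo through its pNext
  // chain when semaphores imported from D3D12 fences are waited on or signaled with specific fence
  // values. "waitValue"/"signalValue" and the surrounding SubmitInfo setup are assumptions.
  //
  //   uint64_t waitValue = 1, signalValue = 2;
  //   vk::D3D12FenceSubmitInfoKHR d3d12FenceInfo = vk::D3D12FenceSubmitInfoKHR()
  //                                                  .setWaitSemaphoreValues( waitValue )
  //                                                  .setSignalSemaphoreValues( signalValue );
  //   vk::SubmitInfo submitInfo;   // wait/signal semaphores filled in elsewhere
  //   submitInfo.setPNext( &d3d12FenceInfo );
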
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  struct DebugMarkerObjectNameInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ =
                                                         VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
                                                       uint64_t     object_      = {},
                                                       const char * pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT
      : objectType( objectType_ )
      , object( object_ )
      , pObjectName( pObjectName_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT &
                            operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
      return *this;
    }

    DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DebugMarkerObjectNameInfoEXT &
      setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
    {
      objectType = objectType_;
      return *this;
    }

    DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
    {
      object = object_;
      return *this;
    }

    DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjectName = pObjectName_;
      return *this;
    }

    operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( this );
    }

    operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugMarkerObjectNameInfoEXT const & ) const = default;
#else
    bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) &&
             ( object == rhs.object ) && ( pObjectName == rhs.pObjectName );
    }

    bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType = StructureType::eDebugMarkerObjectNameInfoEXT;
    const void *                                   pNext = {};
    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
    uint64_t     object      = {};
    const char * pObjectName = {};
  };
  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
  {
    using Type = DebugMarkerObjectNameInfoEXT;
  };
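
  // Usage sketch (editorial comment only) for VK_EXT_debug_marker: attach a human-readable name to
  // an object so it shows up in debugging tools. "device" and "buffer" are assumed handles; the
  // uint64_t object value is the raw Vulkan handle.
  //
  //   device.debugMarkerSetObjectNameEXT(
  //     vk::DebugMarkerObjectNameInfoEXT()
  //       .setObjectType( vk::DebugReportObjectTypeEXT::eBuffer )
  //       .setObject( uint64_t( static_cast<VkBuffer>( buffer ) ) )
  //       .setPObjectName( "my vertex buffer" ) );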

  struct DebugMarkerObjectTagInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ =
                                                        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
                                                      uint64_t     object_  = {},
                                                      uint64_t     tagName_ = {},
                                                      size_t       tagSize_ = {},
                                                      const void * pTag_    = {} ) VULKAN_HPP_NOEXCEPT
      : objectType( objectType_ )
      , object( object_ )
      , tagName( tagName_ )
      , tagSize( tagSize_ )
      , pTag( pTag_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT                 objectType_,
                                 uint64_t                                                       object_,
                                 uint64_t                                                       tagName_,
                                 VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
      : objectType( objectType_ )
      , object( object_ )
      , tagName( tagName_ )
      , tagSize( tag_.size() * sizeof( T ) )
      , pTag( tag_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT &
                            operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
      return *this;
    }

    DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DebugMarkerObjectTagInfoEXT &
      setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
    {
      objectType = objectType_;
      return *this;
    }

    DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
    {
      object = object_;
      return *this;
    }

    DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
    {
      tagName = tagName_;
      return *this;
    }

    DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tagSize_;
      return *this;
    }

    DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
    {
      pTag = pTag_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    DebugMarkerObjectTagInfoEXT &
      setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tag_.size() * sizeof( T );
      pTag    = tag_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( this );
    }

    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default;
#else
    bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) &&
             ( object == rhs.object ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) &&
             ( pTag == rhs.pTag );
    }

    bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType = StructureType::eDebugMarkerObjectTagInfoEXT;
    const void *                                   pNext = {};
    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
    uint64_t     object  = {};
    uint64_t     tagName = {};
    size_t       tagSize = {};
    const void * pTag    = {};
  };
  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
  {
    using Type = DebugMarkerObjectTagInfoEXT;
  };
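
  // Sketch (editorial comment only): the setTag overload above derives tagSize from the element
  // count times sizeof( T ), so arbitrary POD data can be attached as a tag. "device", "image" and
  // "tagData" (a std::array<uint32_t, 4>) are assumptions.
  //
  //   device.debugMarkerSetObjectTagEXT(
  //     vk::DebugMarkerObjectTagInfoEXT()
  //       .setObjectType( vk::DebugReportObjectTypeEXT::eImage )
  //       .setObject( uint64_t( static_cast<VkImage>( image ) ) )
  //       .setTagName( 1 )
  //       .setTag<uint32_t>( tagData ) );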

  struct DebugReportCallbackCreateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_       = {},
                                                           PFN_vkDebugReportCallbackEXT              pfnCallback_ = {},
                                                           void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pfnCallback( pfnCallback_ )
      , pUserData( pUserData_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT &
                            operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
      return *this;
    }

    DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnCallback = pfnCallback_;
      return *this;
    }

    DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

    operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( this );
    }

    operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugReportCallbackCreateInfoEXT const & ) const = default;
#else
    bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData );
    }

    bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType       = StructureType::eDebugReportCallbackCreateInfoEXT;
    const void *                              pNext       = {};
    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags       = {};
    PFN_vkDebugReportCallbackEXT              pfnCallback = {};
    void *                                    pUserData   = {};
  };
  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DebugReportCallbackCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
  {
    using Type = DebugReportCallbackCreateInfoEXT;
  };
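
  // Sketch (editorial comment only): VK_EXT_debug_report is the older debug-callback extension,
  // largely superseded by VK_EXT_debug_utils. Assuming a free function "debugReportCallback" with
  // the PFN_vkDebugReportCallbackEXT signature and a vk::Instance "instance" created with the
  // extension enabled:
  //
  //   vk::DebugReportCallbackEXT callback = instance.createDebugReportCallbackEXT(
  //     vk::DebugReportCallbackCreateInfoEXT()
  //       .setFlags( vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning )
  //       .setPfnCallback( &debugReportCallback ) );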

  struct DebugUtilsObjectNameInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(
      VULKAN_HPP_NAMESPACE::ObjectType objectType_   = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
      uint64_t                         objectHandle_ = {},
      const char *                     pObjectName_  = {} ) VULKAN_HPP_NOEXCEPT
      : objectType( objectType_ )
      , objectHandle( objectHandle_ )
      , pObjectName( pObjectName_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT &
                            operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
      return *this;
    }

    DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
    {
      objectType = objectType_;
      return *this;
    }

    DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      objectHandle = objectHandle_;
      return *this;
    }

    DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjectName = pObjectName_;
      return *this;
    }

    operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( this );
    }

    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugUtilsObjectNameInfoEXT const & ) const = default;
#else
    bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) &&
             ( objectHandle == rhs.objectHandle ) && ( pObjectName == rhs.pObjectName );
    }

    bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eDebugUtilsObjectNameInfoEXT;
    const void *                        pNext        = {};
    VULKAN_HPP_NAMESPACE::ObjectType    objectType   = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
    uint64_t                            objectHandle = {};
    const char *                        pObjectName  = {};
  };
  static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DebugUtilsObjectNameInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
  {
    using Type = DebugUtilsObjectNameInfoEXT;
  };
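
  // Sketch (editorial comment only): the VK_EXT_debug_utils counterpart of the debug-marker name
  // info above, keyed by vk::ObjectType instead of vk::DebugReportObjectTypeEXT. "device" and
  // "queue" are assumed handles.
  //
  //   device.setDebugUtilsObjectNameEXT(
  //     vk::DebugUtilsObjectNameInfoEXT()
  //       .setObjectType( vk::ObjectType::eQueue )
  //       .setObjectHandle( uint64_t( static_cast<VkQueue>( queue ) ) )
  //       .setPObjectName( "graphics queue" ) );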

  struct DebugUtilsMessengerCallbackDataEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDebugUtilsMessengerCallbackDataEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(
      VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_            = {},
      const char *                                                  pMessageIdName_   = {},
      int32_t                                                       messageIdNumber_  = {},
      const char *                                                  pMessage_         = {},
      uint32_t                                                      queueLabelCount_  = {},
      const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *              pQueueLabels_     = {},
      uint32_t                                                      cmdBufLabelCount_ = {},
      const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *              pCmdBufLabels_    = {},
      uint32_t                                                      objectCount_      = {},
      const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT *     pObjects_         = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pMessageIdName( pMessageIdName_ )
      , messageIdNumber( messageIdNumber_ )
      , pMessage( pMessage_ )
      , queueLabelCount( queueLabelCount_ )
      , pQueueLabels( pQueueLabels_ )
      , cmdBufLabelCount( cmdBufLabelCount_ )
      , pCmdBufLabels( pCmdBufLabels_ )
      , objectCount( objectCount_ )
      , pObjects( pObjects_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DebugUtilsMessengerCallbackDataEXT(
      VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_,
      const char *                                                  pMessageIdName_,
      int32_t                                                       messageIdNumber_,
      const char *                                                  pMessage_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const &
        queueLabels_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const &
        cmdBufLabels_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const &
        objects_ = {} )
      : flags( flags_ )
      , pMessageIdName( pMessageIdName_ )
      , messageIdNumber( messageIdNumber_ )
      , pMessage( pMessage_ )
      , queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) )
      , pQueueLabels( queueLabels_.data() )
      , cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) )
      , pCmdBufLabels( cmdBufLabels_.data() )
      , objectCount( static_cast<uint32_t>( objects_.size() ) )
      , pObjects( objects_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT &
                            operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsMessengerCallbackDataEXT &
      operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
      return *this;
    }

    DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DebugUtilsMessengerCallbackDataEXT &
      setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
    {
      pMessageIdName = pMessageIdName_;
      return *this;
    }

    DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
    {
      messageIdNumber = messageIdNumber_;
      return *this;
    }

    DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT
    {
      pMessage = pMessage_;
      return *this;
    }

    DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueLabelCount = queueLabelCount_;
      return *this;
    }

    DebugUtilsMessengerCallbackDataEXT &
      setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueLabels = pQueueLabels_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DebugUtilsMessengerCallbackDataEXT & setQueueLabels(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const &
        queueLabels_ ) VULKAN_HPP_NOEXCEPT
    {
      queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
      pQueueLabels    = queueLabels_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
    {
      cmdBufLabelCount = cmdBufLabelCount_;
      return *this;
    }

    DebugUtilsMessengerCallbackDataEXT &
      setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
    {
      pCmdBufLabels = pCmdBufLabels_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const &
        cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
    {
      cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
      pCmdBufLabels    = cmdBufLabels_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
    {
      objectCount = objectCount_;
      return *this;
    }

    DebugUtilsMessengerCallbackDataEXT &
      setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT
    {
      pObjects = pObjects_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DebugUtilsMessengerCallbackDataEXT & setObjects(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const &
        objects_ ) VULKAN_HPP_NOEXCEPT
    {
      objectCount = static_cast<uint32_t>( objects_.size() );
      pObjects    = objects_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( this );
    }

    operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugUtilsMessengerCallbackDataEXT const & ) const = default;
#else
    bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( pMessageIdName == rhs.pMessageIdName ) && ( messageIdNumber == rhs.messageIdNumber ) &&
             ( pMessage == rhs.pMessage ) && ( queueLabelCount == rhs.queueLabelCount ) &&
             ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) &&
             ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) &&
             ( pObjects == rhs.pObjects );
    }

    bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags            = {};
    const char *                                                  pMessageIdName   = {};
    int32_t                                                       messageIdNumber  = {};
    const char *                                                  pMessage         = {};
    uint32_t                                                      queueLabelCount  = {};
    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *              pQueueLabels     = {};
    uint32_t                                                      cmdBufLabelCount = {};
    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT *              pCmdBufLabels    = {};
    uint32_t                                                      objectCount      = {};
    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT *     pObjects         = {};
  };
  static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DebugUtilsMessengerCallbackDataEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
  {
    using Type = DebugUtilsMessengerCallbackDataEXT;
  };

  struct DebugUtilsMessengerCreateInfoEXT
  {
    static const bool                    allowDuplicate              = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_           = {},
                                        VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {},
                                        VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT     messageType_     = {},
                                        PFN_vkDebugUtilsMessengerCallbackEXT                    pfnUserCallback_ = {},
                                        void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , messageSeverity( messageSeverity_ )
      , messageType( messageType_ )
      , pfnUserCallback( pfnUserCallback_ )
      , pUserData( pUserData_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT &
                            operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
      return *this;
    }

    DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DebugUtilsMessengerCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DebugUtilsMessengerCreateInfoEXT &
      setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
    {
      messageSeverity = messageSeverity_;
      return *this;
    }

    DebugUtilsMessengerCreateInfoEXT &
      setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
    {
      messageType = messageType_;
      return *this;
    }

    DebugUtilsMessengerCreateInfoEXT &
      setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnUserCallback = pfnUserCallback_;
      return *this;
    }

    DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

    operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( this );
    }

    operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugUtilsMessengerCreateInfoEXT const & ) const = default;
#else
    bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( messageSeverity == rhs.messageSeverity ) && ( messageType == rhs.messageType ) &&
             ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData );
    }

    bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                     sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
    const void *                                            pNext = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT     messageType     = {};
    PFN_vkDebugUtilsMessengerCallbackEXT                    pfnUserCallback = {};
    void *                                                  pUserData       = {};
  };
  static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DebugUtilsMessengerCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
  {
    using Type = DebugUtilsMessengerCreateInfoEXT;
  };
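
  // Sketch (editorial comment only): a typical VK_EXT_debug_utils messenger setup. The callback
  // and the name "instance" are assumptions; the extension entry points are not exported by the
  // loader, so in practice they are fetched via vkGetInstanceProcAddr (for example through
  // vk::DispatchLoaderDynamic).
  //
  //   VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT,
  //                                                 VkDebugUtilsMessageTypeFlagsEXT,
  //                                                 const VkDebugUtilsMessengerCallbackDataEXT * pData,
  //                                                 void * /*pUserData*/ )
  //   {
  //     printf( "%s\n", pData->pMessage );
  //     return VK_FALSE;   // don't abort the call that triggered the message
  //   }
  //
  //   vk::DebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXT(
  //     vk::DebugUtilsMessengerCreateInfoEXT()
  //       .setMessageSeverity( vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
  //                            vk::DebugUtilsMessageSeverityFlagBitsEXT::eError )
  //       .setMessageType( vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
  //                        vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation )
  //       .setPfnUserCallback( &debugCallback ) );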

  struct DebugUtilsObjectTagInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(
      VULKAN_HPP_NAMESPACE::ObjectType objectType_   = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
      uint64_t                         objectHandle_ = {},
      uint64_t                         tagName_      = {},
      size_t                           tagSize_      = {},
      const void *                     pTag_         = {} ) VULKAN_HPP_NOEXCEPT
      : objectType( objectType_ )
      , objectHandle( objectHandle_ )
      , tagName( tagName_ )
      , tagSize( tagSize_ )
      , pTag( pTag_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType                               objectType_,
                                uint64_t                                                       objectHandle_,
                                uint64_t                                                       tagName_,
                                VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
      : objectType( objectType_ )
      , objectHandle( objectHandle_ )
      , tagName( tagName_ )
      , tagSize( tag_.size() * sizeof( T ) )
      , pTag( tag_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT &
                            operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
      return *this;
    }

    DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
    {
      objectType = objectType_;
      return *this;
    }

    DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      objectHandle = objectHandle_;
      return *this;
    }

    DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
    {
      tagName = tagName_;
      return *this;
    }

    DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tagSize_;
      return *this;
    }

    DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT
    {
      pTag = pTag_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    DebugUtilsObjectTagInfoEXT &
      setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
    {
      tagSize = tag_.size() * sizeof( T );
      pTag    = tag_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( this );
    }

    operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default;
#else
    bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) &&
             ( objectHandle == rhs.objectHandle ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) &&
             ( pTag == rhs.pTag );
    }

    bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eDebugUtilsObjectTagInfoEXT;
    const void *                        pNext        = {};
    VULKAN_HPP_NAMESPACE::ObjectType    objectType   = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
    uint64_t                            objectHandle = {};
    uint64_t                            tagName      = {};
    size_t                              tagSize      = {};
    const void *                        pTag         = {};
  };
  static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DebugUtilsObjectTagInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
  {
    using Type = DebugUtilsObjectTagInfoEXT;
  };
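
  // Sketch (editorial comment only): like the object-name info above but attaching opaque data;
  // mirrors DebugMarkerObjectTagInfoEXT. "device", "image" and "tagData" (a std::vector<uint8_t>)
  // are assumptions.
  //
  //   device.setDebugUtilsObjectTagEXT(
  //     vk::DebugUtilsObjectTagInfoEXT()
  //       .setObjectType( vk::ObjectType::eImage )
  //       .setObjectHandle( uint64_t( static_cast<VkImage>( image ) ) )
  //       .setTagName( 1 )
  //       .setTag<uint8_t>( tagData ) );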

  struct DedicatedAllocationBufferCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDedicatedAllocationBufferCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} )
      VULKAN_HPP_NOEXCEPT : dedicatedAllocation( dedicatedAllocation_ )
    {}

    VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DedicatedAllocationBufferCreateInfoNV(
          *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV &
                            operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationBufferCreateInfoNV &
      operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
      return *this;
    }

    DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DedicatedAllocationBufferCreateInfoNV &
      setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocation = dedicatedAllocation_;
      return *this;
    }

    operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV *>( this );
    }

    operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default;
#else
    bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
    }

    bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eDedicatedAllocationBufferCreateInfoNV;
    const void *                        pNext               = {};
    VULKAN_HPP_NAMESPACE::Bool32        dedicatedAllocation = {};
  };
  static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DedicatedAllocationBufferCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
  {
    using Type = DedicatedAllocationBufferCreateInfoNV;
  };
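
  // Sketch (editorial comment only): VK_NV_dedicated_allocation chains this struct into
  // BufferCreateInfo to request that the buffer gets its own allocation; the matching
  // DedicatedAllocationMemoryAllocateInfoNV below is then chained into the MemoryAllocateInfo.
  // "device", the size and the usage flags are placeholders.
  //
  //   vk::DedicatedAllocationBufferCreateInfoNV dedicatedInfo( VK_TRUE );
  //   vk::BufferCreateInfo bufferInfo = vk::BufferCreateInfo()
  //                                       .setSize( 65536 )
  //                                       .setUsage( vk::BufferUsageFlagBits::eVertexBuffer )
  //                                       .setPNext( &dedicatedInfo );
  //   vk::Buffer buffer = device.createBuffer( bufferInfo );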

  struct DedicatedAllocationImageCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDedicatedAllocationImageCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
      : dedicatedAllocation( dedicatedAllocation_ )
    {}

    VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV &
                            operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationImageCreateInfoNV &
      operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
      return *this;
    }

    DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DedicatedAllocationImageCreateInfoNV &
      setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocation = dedicatedAllocation_;
      return *this;
    }

    operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

    operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default;
#else
    bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation );
    }

    bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eDedicatedAllocationImageCreateInfoNV;
    const void *                        pNext               = {};
    VULKAN_HPP_NAMESPACE::Bool32        dedicatedAllocation = {};
  };
  static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DedicatedAllocationImageCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
  {
    using Type = DedicatedAllocationImageCreateInfoNV;
  };

  struct DedicatedAllocationMemoryAllocateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDedicatedAllocationMemoryAllocateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image  image_  = {},
                                               VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
      , buffer( buffer_ )
    {}

    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DedicatedAllocationMemoryAllocateInfoNV(
          *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV &
                            operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DedicatedAllocationMemoryAllocateInfoNV &
      operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
      return *this;
    }

    DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
    }

    operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default;
#else
    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer );
    }

    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
    const void *                        pNext  = {};
    VULKAN_HPP_NAMESPACE::Image         image  = {};
    VULKAN_HPP_NAMESPACE::Buffer        buffer = {};
  };
  static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) ==
                   sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DedicatedAllocationMemoryAllocateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
  {
    using Type = DedicatedAllocationMemoryAllocateInfoNV;
  };
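
  // The three VK_NV_dedicated_allocation wrapper structs above extend BufferCreateInfo,
  // ImageCreateInfo and MemoryAllocateInfo through their pNext chains. A minimal usage
  // sketch, kept as a comment so this generated header is unchanged; the device handle,
  // buffer size and memoryTypeIndex below are illustrative placeholders, not values
  // prescribed by this header:
  //
  //   VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV dedicatedBufferInfo( VK_TRUE );
  //   VULKAN_HPP_NAMESPACE::BufferCreateInfo bufferInfo(
  //     {}, 65536, VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eStorageBuffer );
  //   bufferInfo.setPNext( &dedicatedBufferInfo );
  //   VULKAN_HPP_NAMESPACE::Buffer buffer = device.createBuffer( bufferInfo );
  //
  //   VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV dedicatedAllocInfo( {}, buffer );
  //   VULKAN_HPP_NAMESPACE::MemoryAllocateInfo allocInfo( device.getBufferMemoryRequirements( buffer ).size,
  //                                                       /* memoryTypeIndex */ 0 );
  //   allocInfo.setPNext( &dedicatedAllocInfo );
  //   VULKAN_HPP_NAMESPACE::DeviceMemory memory = device.allocateMemory( allocInfo );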

  struct DescriptorPoolSize
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
                          uint32_t                             descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , descriptorCount( descriptorCount_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize &
                            operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
      return *this;
    }

    DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }

    operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorPoolSize *>( this );
    }

    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorPoolSize *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPoolSize const & ) const = default;
#else
    bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount );
    }

    bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DescriptorType type            = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
    uint32_t                             descriptorCount = {};
  };
  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );

  struct DescriptorPoolCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags  flags_         = {},
                                uint32_t                                         maxSets_       = {},
                                uint32_t                                         poolSizeCount_ = {},
                                const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , maxSets( maxSets_ )
      , poolSizeCount( poolSizeCount_ )
      , pPoolSizes( pPoolSizes_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorPoolCreateInfo(
      VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags                                                       flags_,
      uint32_t                                                                                              maxSets_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
      : flags( flags_ )
      , maxSets( maxSets_ )
      , poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) )
      , pPoolSizes( poolSizes_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo &
                            operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
      return *this;
    }

    DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSets = maxSets_;
      return *this;
    }

    DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      poolSizeCount = poolSizeCount_;
      return *this;
    }

    DescriptorPoolCreateInfo &
      setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
    {
      pPoolSizes = pPoolSizes_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorPoolCreateInfo & setPoolSizes(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
      VULKAN_HPP_NOEXCEPT
    {
      poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
      pPoolSizes    = poolSizes_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorPoolCreateInfo *>( this );
    }

    operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorPoolCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPoolCreateInfo const & ) const = default;
#else
    bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) &&
             ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes );
    }

    bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType         = StructureType::eDescriptorPoolCreateInfo;
    const void *                                     pNext         = {};
    VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags  flags         = {};
    uint32_t                                         maxSets       = {};
    uint32_t                                         poolSizeCount = {};
    const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes    = {};
  };
  static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
  {
    using Type = DescriptorPoolCreateInfo;
  };
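
  // Usage sketch for the descriptor-pool structs above, kept as a comment; `device` and the
  // counts are illustrative placeholders. In enhanced mode the ArrayProxy overloads keep
  // poolSizeCount and pPoolSizes consistent automatically:
  //
  //   std::array<VULKAN_HPP_NAMESPACE::DescriptorPoolSize, 2> poolSizes = {
  //     VULKAN_HPP_NAMESPACE::DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType::eUniformBuffer, 16 ),
  //     VULKAN_HPP_NAMESPACE::DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType::eCombinedImageSampler, 16 )
  //   };
  //   VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo poolInfo(
  //     VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, /* maxSets */ 8, poolSizes );
  //   VULKAN_HPP_NAMESPACE::DescriptorPool pool = device.createDescriptorPool( poolInfo );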

  struct DescriptorPoolInlineUniformBlockCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT(
      DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : DescriptorPoolInlineUniformBlockCreateInfoEXT(
          *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfoEXT &
                            operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorPoolInlineUniformBlockCreateInfoEXT &
      operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs );
      return *this;
    }

    DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DescriptorPoolInlineUniformBlockCreateInfoEXT &
      setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
    {
      maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
      return *this;
    }

    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT *>( this );
    }

    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const & ) const = default;
#else
    bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
    }

    bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
    const void *                        pNext = {};
    uint32_t                            maxInlineUniformBlockBindings = {};
  };
  static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) ==
                   sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT>
  {
    using Type = DescriptorPoolInlineUniformBlockCreateInfoEXT;
  };
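
  // DescriptorPoolInlineUniformBlockCreateInfoEXT extends DescriptorPoolCreateInfo through its
  // pNext chain (VK_EXT_inline_uniform_block). Sketch, reusing the hypothetical `poolInfo` from
  // the comment above; the chaining must happen before the pool is created:
  //
  //   VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT inlineBlockInfo(
  //     /* maxInlineUniformBlockBindings */ 4 );
  //   poolInfo.setPNext( &inlineBlockInfo );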

  class DescriptorPool
  {
  public:
    using CType = VkDescriptorPool;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;

  public:
    VULKAN_HPP_CONSTEXPR         DescriptorPool() = default;
    VULKAN_HPP_CONSTEXPR         DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
      : m_descriptorPool( descriptorPool )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorPool = descriptorPool;
      return *this;
    }
#endif

    DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorPool = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorPool const & ) const = default;
#else
    bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool == rhs.m_descriptorPool;
    }

    bool operator!=( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool != rhs.m_descriptorPool;
    }

    bool operator<( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool < rhs.m_descriptorPool;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorPool == VK_NULL_HANDLE;
    }

  private:
    VkDescriptorPool m_descriptorPool = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDescriptorPool>
  {
    using type = VULKAN_HPP_NAMESPACE::DescriptorPool;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool>
  {
    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool>
  {
    using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorPool>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
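
  // DescriptorPool, like the other handle wrappers in this header, is a thin, non-owning
  // wrapper around the C handle: nullptr-constructible, comparable, and testable via explicit
  // bool. Illustrative snippet, kept as a comment; `device` and `poolInfo` are placeholders:
  //
  //   VULKAN_HPP_NAMESPACE::DescriptorPool pool = nullptr;   // null handle, converts to false
  //   pool = device.createDescriptorPool( poolInfo );
  //   if ( pool )
  //   {
  //     device.destroyDescriptorPool( pool );
  //     pool = nullptr;
  //   }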

  class DescriptorSetLayout
  {
  public:
    using CType = VkDescriptorSetLayout;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;

  public:
    VULKAN_HPP_CONSTEXPR         DescriptorSetLayout() = default;
    VULKAN_HPP_CONSTEXPR         DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
      : m_descriptorSetLayout( descriptorSetLayout )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorSetLayout = descriptorSetLayout;
      return *this;
    }
#endif

    DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_descriptorSetLayout = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayout const & ) const = default;
#else
    bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
    }

    bool operator!=( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
    }

    bool operator<( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSetLayout;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSetLayout != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_descriptorSetLayout == VK_NULL_HANDLE;
    }

  private:
    VkDescriptorSetLayout m_descriptorSetLayout = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDescriptorSetLayout>
  {
    using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout>
  {
    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout>
  {
    using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct DescriptorSetAllocateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(
      VULKAN_HPP_NAMESPACE::DescriptorPool              descriptorPool_     = {},
      uint32_t                                          descriptorSetCount_ = {},
      const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_        = {} ) VULKAN_HPP_NOEXCEPT
      : descriptorPool( descriptorPool_ )
      , descriptorSetCount( descriptorSetCount_ )
      , pSetLayouts( pSetLayouts_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetAllocateInfo(
      VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &
        setLayouts_ )
      : descriptorPool( descriptorPool_ )
      , descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) )
      , pSetLayouts( setLayouts_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo &
                            operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
      return *this;
    }

    DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DescriptorSetAllocateInfo &
      setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorPool = descriptorPool_;
      return *this;
    }

    DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = descriptorSetCount_;
      return *this;
    }

    DescriptorSetAllocateInfo &
      setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetLayouts = pSetLayouts_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetAllocateInfo & setSetLayouts(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &
        setLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
      pSetLayouts        = setLayouts_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetAllocateInfo *>( this );
    }

    operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetAllocateInfo const & ) const = default;
#else
    bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) &&
             ( descriptorSetCount == rhs.descriptorSetCount ) && ( pSetLayouts == rhs.pSetLayouts );
    }

    bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType              = StructureType::eDescriptorSetAllocateInfo;
    const void *                                      pNext              = {};
    VULKAN_HPP_NAMESPACE::DescriptorPool              descriptorPool     = {};
    uint32_t                                          descriptorSetCount = {};
    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts        = {};
  };
  static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorSetAllocateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
  {
    using Type = DescriptorSetAllocateInfo;
  };
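
  // Usage sketch for DescriptorSetAllocateInfo, kept as a comment; `device`, `pool` and
  // `layout` are placeholders. The ArrayProxy constructor derives descriptorSetCount from the
  // number of layouts passed in; in enhanced mode (with default exception handling) the
  // allocation returns a std::vector:
  //
  //   std::array<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, 2> layouts = { layout, layout };
  //   VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo allocateInfo( pool, layouts );
  //   std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> sets = device.allocateDescriptorSets( allocateInfo );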

  struct DescriptorSetLayoutBinding
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(
      uint32_t                               binding_            = {},
      VULKAN_HPP_NAMESPACE::DescriptorType   descriptorType_     = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
      uint32_t                               descriptorCount_    = {},
      VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_         = {},
      const VULKAN_HPP_NAMESPACE::Sampler *  pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
      : binding( binding_ )
      , descriptorType( descriptorType_ )
      , descriptorCount( descriptorCount_ )
      , stageFlags( stageFlags_ )
      , pImmutableSamplers( pImmutableSamplers_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetLayoutBinding(
      uint32_t                                                                                   binding_,
      VULKAN_HPP_NAMESPACE::DescriptorType                                                       descriptorType_,
      VULKAN_HPP_NAMESPACE::ShaderStageFlags                                                     stageFlags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
      : binding( binding_ )
      , descriptorType( descriptorType_ )
      , descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) )
      , stageFlags( stageFlags_ )
      , pImmutableSamplers( immutableSamplers_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding &
                            operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
      return *this;
    }

    DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    DescriptorSetLayoutBinding &
      setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorType = descriptorType_;
      return *this;
    }

    DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }

    DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    DescriptorSetLayoutBinding &
      setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
    {
      pImmutableSamplers = pImmutableSamplers_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetLayoutBinding & setImmutableSamplers(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
      VULKAN_HPP_NOEXCEPT
    {
      descriptorCount    = static_cast<uint32_t>( immutableSamplers_.size() );
      pImmutableSamplers = immutableSamplers_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetLayoutBinding *>( this );
    }

    operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetLayoutBinding *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayoutBinding const & ) const = default;
#else
    bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) &&
             ( descriptorCount == rhs.descriptorCount ) && ( stageFlags == rhs.stageFlags ) &&
             ( pImmutableSamplers == rhs.pImmutableSamplers );
    }

    bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                               binding            = {};
    VULKAN_HPP_NAMESPACE::DescriptorType   descriptorType     = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
    uint32_t                               descriptorCount    = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags         = {};
    const VULKAN_HPP_NAMESPACE::Sampler *  pImmutableSamplers = {};
  };
  static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorSetLayoutBinding>::value,
                 "struct wrapper is not a standard layout!" );

  struct DescriptorSetLayoutBindingFlagsCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(
      uint32_t                                             bindingCount_  = {},
      const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT
      : bindingCount( bindingCount_ )
      , pBindingFlags( pBindingFlags_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(
      DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT
      : DescriptorSetLayoutBindingFlagsCreateInfo(
          *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetLayoutBindingFlagsCreateInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const &
        bindingFlags_ )
      : bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo &
                            operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetLayoutBindingFlagsCreateInfo &
      operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
      return *this;
    }

    DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindingCount = bindingCount_;
      return *this;
    }

    DescriptorSetLayoutBindingFlagsCreateInfo &
      setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      pBindingFlags = pBindingFlags_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const &
        bindingFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      bindingCount  = static_cast<uint32_t>( bindingFlags_.size() );
      pBindingFlags = bindingFlags_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
    }

    operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default;
#else
    bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) &&
             ( pBindingFlags == rhs.pBindingFlags );
    }

    bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
    const void *                        pNext        = {};
    uint32_t                            bindingCount = {};
    const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {};
  };
  static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) ==
                   sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
  {
    using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
  };
  using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;

  struct DescriptorSetLayoutCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(
      VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags     flags_        = {},
      uint32_t                                                 bindingCount_ = {},
      const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_    = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , bindingCount( bindingCount_ )
      , pBindings( pBindings_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetLayoutCreateInfo(
      VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const &
        bindings_ )
      : flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo &
                            operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
      return *this;
    }

    DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DescriptorSetLayoutCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
    {
      bindingCount = bindingCount_;
      return *this;
    }

    DescriptorSetLayoutCreateInfo &
      setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT
    {
      pBindings = pBindings_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetLayoutCreateInfo & setBindings(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const &
        bindings_ ) VULKAN_HPP_NOEXCEPT
    {
      bindingCount = static_cast<uint32_t>( bindings_.size() );
      pBindings    = bindings_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( this );
    }

    operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default;
#else
    bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( bindingCount == rhs.bindingCount ) && ( pBindings == rhs.pBindings );
    }

    bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                      sType = StructureType::eDescriptorSetLayoutCreateInfo;
    const void *                                             pNext = {};
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags     flags = {};
    uint32_t                                                 bindingCount = {};
    const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings    = {};
  };
  static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorSetLayoutCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
  {
    using Type = DescriptorSetLayoutCreateInfo;
  };
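
  // Combined sketch for DescriptorSetLayoutCreateInfo and the binding-flags extension struct
  // above, kept as a comment; `device` and `uboBinding` are placeholders. The flags struct is
  // chained through pNext and must supply one flag per binding:
  //
  //   std::array<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding, 1> bindings = { uboBinding };
  //   std::array<VULKAN_HPP_NAMESPACE::DescriptorBindingFlags, 1>     bindingFlags = {
  //     VULKAN_HPP_NAMESPACE::DescriptorBindingFlags(
  //       VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits::eUpdateAfterBind )
  //   };
  //   VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo flagsInfo( bindingFlags );
  //   VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo layoutInfo(
  //     VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool, bindings );
  //   layoutInfo.setPNext( &flagsInfo );
  //   VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout = device.createDescriptorSetLayout( layoutInfo );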

  struct DescriptorSetLayoutSupport
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT
      : supported( supported_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutSupport &
                            operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
      return *this;
    }

    operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetLayoutSupport *>( this );
    }

    operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetLayoutSupport *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetLayoutSupport const & ) const = default;
#else
    bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported );
    }

    bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eDescriptorSetLayoutSupport;
    void *                              pNext     = {};
    VULKAN_HPP_NAMESPACE::Bool32        supported = {};
  };
  static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorSetLayoutSupport>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
  {
    using Type = DescriptorSetLayoutSupport;
  };
  using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
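
  // DescriptorSetLayoutSupport is an output structure: it is filled in by the implementation
  // rather than by the application, which is why it has no setters and a non-const pNext.
  // Query sketch, kept as a comment; `device` and `layoutInfo` are placeholders, and this
  // assumes the enhanced-mode Device::getDescriptorSetLayoutSupport overload defined elsewhere
  // in this header:
  //
  //   VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support =
  //     device.getDescriptorSetLayoutSupport( layoutInfo );
  //   bool creatable = ( support.supported == VK_TRUE );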

  struct DescriptorSetVariableDescriptorCountAllocateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t         descriptorSetCount_ = {},
                                                        const uint32_t * pDescriptorCounts_  = {} ) VULKAN_HPP_NOEXCEPT
      : descriptorSetCount( descriptorSetCount_ )
      , pDescriptorCounts( pDescriptorCounts_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(
      DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT
      : DescriptorSetVariableDescriptorCountAllocateInfo(
          *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetVariableDescriptorCountAllocateInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ )
      : descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) )
      , pDescriptorCounts( descriptorCounts_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo &
                            operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetVariableDescriptorCountAllocateInfo &
      operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
      return *this;
    }

    DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DescriptorSetVariableDescriptorCountAllocateInfo &
      setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = descriptorSetCount_;
      return *this;
    }

    DescriptorSetVariableDescriptorCountAllocateInfo &
      setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescriptorCounts = pDescriptorCounts_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
      pDescriptorCounts  = descriptorCounts_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
    }

    operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default;
#else
    bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) &&
             ( pDescriptorCounts == rhs.pDescriptorCounts );
    }

    bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
    const void *                        pNext = {};
    uint32_t                            descriptorSetCount = {};
    const uint32_t *                    pDescriptorCounts  = {};
  };
  static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) ==
                   sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
  {
    using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
  };
  using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
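
  // DescriptorSetVariableDescriptorCountAllocateInfo extends DescriptorSetAllocateInfo through
  // pNext, supplying the actual descriptor count of each variable-sized binding being
  // allocated. Sketch, kept as a comment; `allocateInfo` is the placeholder from the earlier
  // comment, and one count is required per set being allocated:
  //
  //   std::array<uint32_t, 2> variableCounts = { 64, 64 };
  //   VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo variableCountInfo( variableCounts );
  //   allocateInfo.setPNext( &variableCountInfo );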

  struct DescriptorSetVariableDescriptorCountLayoutSupport
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(
      DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs )
      VULKAN_HPP_NOEXCEPT
      : DescriptorSetVariableDescriptorCountLayoutSupport(
          *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountLayoutSupport &
                            operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorSetVariableDescriptorCountLayoutSupport &
      operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
      return *this;
    }

    operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
    }

    operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default;
#else
    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
    }

    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
    void *                              pNext = {};
    uint32_t                            maxVariableDescriptorCount = {};
  };
  static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) ==
                   sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupport>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
  {
    using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
  };
  using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
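
  // DescriptorSetVariableDescriptorCountLayoutSupport is the matching output struct: chained
  // into DescriptorSetLayoutSupport::pNext, it reports the maximum variable descriptor count
  // for the queried layout. Sketch using a StructureChain, kept as a comment; it assumes the
  // enhanced-mode templated getDescriptorSetLayoutSupport overload defined elsewhere in this
  // header, with `device` and `layoutInfo` as placeholders:
  //
  //   auto chain = device.getDescriptorSetLayoutSupport<
  //     VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport,
  //     VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>( layoutInfo );
  //   uint32_t maxVariable =
  //     chain.get<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;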

  struct DescriptorUpdateTemplateEntry
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(
      uint32_t                             dstBinding_      = {},
      uint32_t                             dstArrayElement_ = {},
      uint32_t                             descriptorCount_ = {},
      VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_  = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
      size_t                               offset_          = {},
      size_t                               stride_          = {} ) VULKAN_HPP_NOEXCEPT
      : dstBinding( dstBinding_ )
      , dstArrayElement( dstArrayElement_ )
      , descriptorCount( descriptorCount_ )
      , descriptorType( descriptorType_ )
      , offset( offset_ )
      , stride( stride_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorUpdateTemplateEntry( *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry &
                            operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>( &rhs );
      return *this;
    }

    DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      dstBinding = dstBinding_;
      return *this;
    }

    DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
    {
      dstArrayElement = dstArrayElement_;
      return *this;
    }

    DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorCount = descriptorCount_;
      return *this;
    }

    DescriptorUpdateTemplateEntry &
      setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorType = descriptorType_;
      return *this;
    }

    DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry *>( this );
    }

    operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorUpdateTemplateEntry *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default;
#else
    bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) &&
             ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) &&
             ( offset == rhs.offset ) && ( stride == rhs.stride );
    }

    bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                             dstBinding      = {};
    uint32_t                             dstArrayElement = {};
    uint32_t                             descriptorCount = {};
    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType  = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
    size_t                               offset          = {};
    size_t                               stride          = {};
  };
  static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorUpdateTemplateEntry>::value,
                 "struct wrapper is not a standard layout!" );
  using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
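
  // Usage sketch (editorial note, not generated from the registry): each entry maps a run of
  // descriptors in one binding to an offset/stride inside the host data blob later handed to
  // updateDescriptorSetWithTemplate. A single uniform-buffer descriptor packed at the start of
  // that blob could be described as:
  //
  //   vk::DescriptorUpdateTemplateEntry entry = vk::DescriptorUpdateTemplateEntry()
  //                                               .setDstBinding( 0 )
  //                                               .setDstArrayElement( 0 )
  //                                               .setDescriptorCount( 1 )
  //                                               .setDescriptorType( vk::DescriptorType::eUniformBuffer )
  //                                               .setOffset( 0 )
  //                                               .setStride( sizeof( vk::DescriptorBufferInfo ) );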

  struct DescriptorUpdateTemplateCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDescriptorUpdateTemplateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(
      VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags   flags_                      = {},
      uint32_t                                                    descriptorUpdateEntryCount_ = {},
      const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_   = {},
      VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType          templateType_ =
        VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
      VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
      VULKAN_HPP_NAMESPACE::PipelineBindPoint   pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
      VULKAN_HPP_NAMESPACE::PipelineLayout      pipelineLayout_    = {},
      uint32_t                                  set_               = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
      , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
      , templateType( templateType_ )
      , descriptorSetLayout( descriptorSetLayout_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , pipelineLayout( pipelineLayout_ )
      , set( set_ )
    {}

    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorUpdateTemplateCreateInfo(
      VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const &
                                                         descriptorUpdateEntries_,
      VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ =
        VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
      VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
      VULKAN_HPP_NAMESPACE::PipelineBindPoint   pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
      VULKAN_HPP_NAMESPACE::PipelineLayout      pipelineLayout_    = {},
      uint32_t                                  set_               = {} )
      : flags( flags_ )
      , descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) )
      , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() )
      , templateType( templateType_ )
      , descriptorSetLayout( descriptorSetLayout_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , pipelineLayout( pipelineLayout_ )
      , set( set_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo &
                            operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DescriptorUpdateTemplateCreateInfo &
      operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
      return *this;
    }

    DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DescriptorUpdateTemplateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DescriptorUpdateTemplateCreateInfo &
      setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
      return *this;
    }

    DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries(
      const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const &
        descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
      pDescriptorUpdateEntries   = descriptorUpdateEntries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DescriptorUpdateTemplateCreateInfo &
      setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
    {
      templateType = templateType_;
      return *this;
    }

    DescriptorUpdateTemplateCreateInfo &
      setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorSetLayout = descriptorSetLayout_;
      return *this;
    }

    DescriptorUpdateTemplateCreateInfo &
      setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    DescriptorUpdateTemplateCreateInfo &
      setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineLayout = pipelineLayout_;
      return *this;
    }

    DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
    {
      set = set_;
      return *this;
    }

    operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( this );
    }

    operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default;
#else
    bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) &&
             ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) &&
             ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
             ( pipelineLayout == rhs.pipelineLayout ) && ( set == rhs.set );
    }

    bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags   flags                      = {};
    uint32_t                                                    descriptorUpdateEntryCount = {};
    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries   = {};
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType          templateType =
      VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
    VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
    VULKAN_HPP_NAMESPACE::PipelineBindPoint   pipelineBindPoint   = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
    VULKAN_HPP_NAMESPACE::PipelineLayout      pipelineLayout      = {};
    uint32_t                                  set                 = {};
  };
  static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DescriptorUpdateTemplateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
  {
    using Type = DescriptorUpdateTemplateCreateInfo;
  };
  using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
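
  // Usage sketch (editorial note, not generated from the registry): with enhanced mode enabled,
  // the ArrayProxy constructor and setDescriptorUpdateEntries fill descriptorUpdateEntryCount and
  // pDescriptorUpdateEntries together from a container. `entries`, `setLayout` and `device` are
  // assumed to be provided by the caller:
  //
  //   std::array<vk::DescriptorUpdateTemplateEntry, 1> entries = { entry };  // entry as sketched above
  //   vk::DescriptorUpdateTemplateCreateInfo           createInfo(
  //     {}, entries, vk::DescriptorUpdateTemplateType::eDescriptorSet, setLayout );
  //   vk::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplate( createInfo );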

  struct DeviceQueueCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_            = {},
                                                uint32_t                                     queueFamilyIndex_ = {},
                                                uint32_t                                     queueCount_       = {},
                                                const float * pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , queueFamilyIndex( queueFamilyIndex_ )
      , queueCount( queueCount_ )
      , pQueuePriorities( pQueuePriorities_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags                       flags_,
                           uint32_t                                                           queueFamilyIndex_,
                           VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ )
      : flags( flags_ )
      , queueFamilyIndex( queueFamilyIndex_ )
      , queueCount( static_cast<uint32_t>( queuePriorities_.size() ) )
      , pQueuePriorities( queuePriorities_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo &
                            operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
      return *this;
    }

    DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCount = queueCount_;
      return *this;
    }

    DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueuePriorities = pQueuePriorities_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceQueueCreateInfo & setQueuePriorities(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCount       = static_cast<uint32_t>( queuePriorities_.size() );
      pQueuePriorities = queuePriorities_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceQueueCreateInfo *>( this );
    }

    operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceQueueCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceQueueCreateInfo const & ) const = default;
#else
    bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueCount == rhs.queueCount ) &&
             ( pQueuePriorities == rhs.pQueuePriorities );
    }

    bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType            = StructureType::eDeviceQueueCreateInfo;
    const void *                                 pNext            = {};
    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags            = {};
    uint32_t                                     queueFamilyIndex = {};
    uint32_t                                     queueCount       = {};
    const float *                                pQueuePriorities = {};
  };
  static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
  {
    using Type = DeviceQueueCreateInfo;
  };
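
  // Usage sketch (editorial note, not generated from the registry): one DeviceQueueCreateInfo per
  // queue family, with one priority value per requested queue. `graphicsQueueFamily` is assumed to
  // have been chosen by the caller; this count-plus-pointer form works with or without enhanced mode:
  //
  //   float priority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueCreateInfo( {}, graphicsQueueFamily, 1, &priority );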

  struct PhysicalDeviceFeatures
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_                      = {},
                              VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_                     = {},
                              VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_                          = {},
                              VULKAN_HPP_NAMESPACE::Bool32 independentBlend_                        = {},
                              VULKAN_HPP_NAMESPACE::Bool32 geometryShader_                          = {},
                              VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_                      = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_                       = {},
                              VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_                            = {},
                              VULKAN_HPP_NAMESPACE::Bool32 logicOp_                                 = {},
                              VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_                       = {},
                              VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_               = {},
                              VULKAN_HPP_NAMESPACE::Bool32 depthClamp_                              = {},
                              VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_                          = {},
                              VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_                        = {},
                              VULKAN_HPP_NAMESPACE::Bool32 depthBounds_                             = {},
                              VULKAN_HPP_NAMESPACE::Bool32 wideLines_                               = {},
                              VULKAN_HPP_NAMESPACE::Bool32 largePoints_                             = {},
                              VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_                              = {},
                              VULKAN_HPP_NAMESPACE::Bool32 multiViewport_                           = {},
                              VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_                       = {},
                              VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_                  = {},
                              VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_              = {},
                              VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_                    = {},
                              VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_                   = {},
                              VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_                 = {},
                              VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_          = {},
                              VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_                = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_  = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_               = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_       = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_           = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_     = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_    = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_  = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_  = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_                      = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_                      = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_                           = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_                             = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_                             = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_                 = {},
                              VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_                    = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_                           = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_                   = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_                  = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_                  = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_                 = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_                 = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_                 = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_                = {},
                              VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_                  = {},
                              VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_                 = {},
                              VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT
      : robustBufferAccess( robustBufferAccess_ )
      , fullDrawIndexUint32( fullDrawIndexUint32_ )
      , imageCubeArray( imageCubeArray_ )
      , independentBlend( independentBlend_ )
      , geometryShader( geometryShader_ )
      , tessellationShader( tessellationShader_ )
      , sampleRateShading( sampleRateShading_ )
      , dualSrcBlend( dualSrcBlend_ )
      , logicOp( logicOp_ )
      , multiDrawIndirect( multiDrawIndirect_ )
      , drawIndirectFirstInstance( drawIndirectFirstInstance_ )
      , depthClamp( depthClamp_ )
      , depthBiasClamp( depthBiasClamp_ )
      , fillModeNonSolid( fillModeNonSolid_ )
      , depthBounds( depthBounds_ )
      , wideLines( wideLines_ )
      , largePoints( largePoints_ )
      , alphaToOne( alphaToOne_ )
      , multiViewport( multiViewport_ )
      , samplerAnisotropy( samplerAnisotropy_ )
      , textureCompressionETC2( textureCompressionETC2_ )
      , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
      , textureCompressionBC( textureCompressionBC_ )
      , occlusionQueryPrecise( occlusionQueryPrecise_ )
      , pipelineStatisticsQuery( pipelineStatisticsQuery_ )
      , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
      , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
      , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
      , shaderImageGatherExtended( shaderImageGatherExtended_ )
      , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
      , shaderStorageImageMultisample( shaderStorageImageMultisample_ )
      , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
      , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
      , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
      , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
      , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
      , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
      , shaderClipDistance( shaderClipDistance_ )
      , shaderCullDistance( shaderCullDistance_ )
      , shaderFloat64( shaderFloat64_ )
      , shaderInt64( shaderInt64_ )
      , shaderInt16( shaderInt16_ )
      , shaderResourceResidency( shaderResourceResidency_ )
      , shaderResourceMinLod( shaderResourceMinLod_ )
      , sparseBinding( sparseBinding_ )
      , sparseResidencyBuffer( sparseResidencyBuffer_ )
      , sparseResidencyImage2D( sparseResidencyImage2D_ )
      , sparseResidencyImage3D( sparseResidencyImage3D_ )
      , sparseResidency2Samples( sparseResidency2Samples_ )
      , sparseResidency4Samples( sparseResidency4Samples_ )
      , sparseResidency8Samples( sparseResidency8Samples_ )
      , sparseResidency16Samples( sparseResidency16Samples_ )
      , sparseResidencyAliased( sparseResidencyAliased_ )
      , variableMultisampleRate( variableMultisampleRate_ )
      , inheritedQueries( inheritedQueries_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures &
                            operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceFeatures &
      setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      robustBufferAccess = robustBufferAccess_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
    {
      fullDrawIndexUint32 = fullDrawIndexUint32_;
      return *this;
    }

    PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
    {
      imageCubeArray = imageCubeArray_;
      return *this;
    }

    PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
    {
      independentBlend = independentBlend_;
      return *this;
    }

    PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryShader = geometryShader_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
    {
      tessellationShader = tessellationShader_;
      return *this;
    }

    PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleRateShading = sampleRateShading_;
      return *this;
    }

    PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
    {
      dualSrcBlend = dualSrcBlend_;
      return *this;
    }

    PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
    {
      logicOp = logicOp_;
      return *this;
    }

    PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
    {
      multiDrawIndirect = multiDrawIndirect_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
    {
      drawIndirectFirstInstance = drawIndirectFirstInstance_;
      return *this;
    }

    PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClamp = depthClamp_;
      return *this;
    }

    PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasClamp = depthBiasClamp_;
      return *this;
    }

    PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
    {
      fillModeNonSolid = fillModeNonSolid_;
      return *this;
    }

    PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBounds = depthBounds_;
      return *this;
    }

    PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
    {
      wideLines = wideLines_;
      return *this;
    }

    PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
    {
      largePoints = largePoints_;
      return *this;
    }

    PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaToOne = alphaToOne_;
      return *this;
    }

    PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
    {
      multiViewport = multiViewport_;
      return *this;
    }

    PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerAnisotropy = samplerAnisotropy_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionETC2 = textureCompressionETC2_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionBC = textureCompressionBC_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
    {
      occlusionQueryPrecise = occlusionQueryPrecise_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStatisticsQuery = pipelineStatisticsQuery_;
      return *this;
    }

    PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics(
      VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize(
      VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageGatherExtended = shaderImageGatherExtended_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderStorageImageMultisample(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageMultisample = shaderStorageImageMultisample_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderClipDistance = shaderClipDistance_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderCullDistance = shaderCullDistance_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat64 = shaderFloat64_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt64 = shaderInt64_;
      return *this;
    }

    PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt16 = shaderInt16_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderResourceResidency = shaderResourceResidency_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderResourceMinLod = shaderResourceMinLod_;
      return *this;
    }

    PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseBinding = sparseBinding_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidencyBuffer = sparseResidencyBuffer_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidencyImage2D = sparseResidencyImage2D_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidencyImage3D = sparseResidencyImage3D_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidency2Samples = sparseResidency2Samples_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidency4Samples = sparseResidency4Samples_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidency8Samples = sparseResidency8Samples_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidency16Samples = sparseResidency16Samples_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseResidencyAliased = sparseResidencyAliased_;
      return *this;
    }

    PhysicalDeviceFeatures &
      setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
    {
      variableMultisampleRate = variableMultisampleRate_;
      return *this;
    }

    PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
    {
      inheritedQueries = inheritedQueries_;
      return *this;
    }

    operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFeatures *>( this );
    }

    operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) &&
             ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) &&
             ( geometryShader == rhs.geometryShader ) && ( tessellationShader == rhs.tessellationShader ) &&
             ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) &&
             ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) &&
             ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && ( depthClamp == rhs.depthClamp ) &&
             ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) &&
             ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) &&
             ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) &&
             ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) &&
             ( textureCompressionETC2 == rhs.textureCompressionETC2 ) &&
             ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) &&
             ( textureCompressionBC == rhs.textureCompressionBC ) &&
             ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) &&
             ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) &&
             ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) &&
             ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) &&
             ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) &&
             ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) &&
             ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) &&
             ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) &&
             ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) &&
             ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) &&
             ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) &&
             ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) &&
             ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) &&
             ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) &&
             ( shaderClipDistance == rhs.shaderClipDistance ) && ( shaderCullDistance == rhs.shaderCullDistance ) &&
             ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) &&
             ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) &&
             ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) &&
             ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) &&
             ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) &&
             ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) &&
             ( sparseResidency2Samples == rhs.sparseResidency2Samples ) &&
             ( sparseResidency4Samples == rhs.sparseResidency4Samples ) &&
             ( sparseResidency8Samples == rhs.sparseResidency8Samples ) &&
             ( sparseResidency16Samples == rhs.sparseResidency16Samples ) &&
             ( sparseResidencyAliased == rhs.sparseResidencyAliased ) &&
             ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries );
    }

    bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess                      = {};
    VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32                     = {};
    VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray                          = {};
    VULKAN_HPP_NAMESPACE::Bool32 independentBlend                        = {};
    VULKAN_HPP_NAMESPACE::Bool32 geometryShader                          = {};
    VULKAN_HPP_NAMESPACE::Bool32 tessellationShader                      = {};
    VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading                       = {};
    VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend                            = {};
    VULKAN_HPP_NAMESPACE::Bool32 logicOp                                 = {};
    VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect                       = {};
    VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance               = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthClamp                              = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp                          = {};
    VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid                        = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthBounds                             = {};
    VULKAN_HPP_NAMESPACE::Bool32 wideLines                               = {};
    VULKAN_HPP_NAMESPACE::Bool32 largePoints                             = {};
    VULKAN_HPP_NAMESPACE::Bool32 alphaToOne                              = {};
    VULKAN_HPP_NAMESPACE::Bool32 multiViewport                           = {};
    VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy                       = {};
    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2                  = {};
    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR              = {};
    VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC                    = {};
    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise                   = {};
    VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery                 = {};
    VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics          = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics                = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize  = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended               = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats       = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample           = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat     = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat    = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing  = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing  = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance                      = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance                      = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64                           = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInt64                             = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderInt16                             = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency                 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod                    = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseBinding                           = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer                   = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D                  = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D                  = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples                 = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples                 = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples                 = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples                = {};
    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased                  = {};
    VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate                 = {};
    VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries                        = {};
  };
  static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
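
  // Usage sketch (editorial note, not generated from the registry): the feature set passed to
  // device creation is commonly derived from what the physical device reports, enabling only what
  // the application needs. `physicalDevice` is assumed to come from the caller:
  //
  //   vk::PhysicalDeviceFeatures supported = physicalDevice.getFeatures();
  //   vk::PhysicalDeviceFeatures enabled   = vk::PhysicalDeviceFeatures()
  //                                            .setSamplerAnisotropy( supported.samplerAnisotropy )
  //                                            .setFillModeNonSolid( supported.fillModeNonSolid );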

  struct DeviceCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_                         = {},
                                           uint32_t                                queueCreateInfoCount_          = {},
                                           const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {},
                                           uint32_t                                            enabledLayerCount_ = {},
                                           const char * const * ppEnabledLayerNames_                              = {},
                                           uint32_t             enabledExtensionCount_                            = {},
                                           const char * const * ppEnabledExtensionNames_                          = {},
                                           const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {} )
      VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , queueCreateInfoCount( queueCreateInfoCount_ )
      , pQueueCreateInfos( pQueueCreateInfos_ )
      , enabledLayerCount( enabledLayerCount_ )
      , ppEnabledLayerNames( ppEnabledLayerNames_ )
      , enabledExtensionCount( enabledExtensionCount_ )
      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
      , pEnabledFeatures( pEnabledFeatures_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceCreateInfo(
      VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const &
                                                                                queueCreateInfos_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_     = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {},
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures *                      pEnabledFeatures_       = {} )
      : flags( flags_ )
      , queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) )
      , pQueueCreateInfos( queueCreateInfos_.data() )
      , enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) )
      , ppEnabledLayerNames( pEnabledLayerNames_.data() )
      , enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) )
      , ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
      , pEnabledFeatures( pEnabledFeatures_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
      return *this;
    }

    DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCreateInfoCount = queueCreateInfoCount_;
      return *this;
    }

    DeviceCreateInfo &
      setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueCreateInfos = pQueueCreateInfos_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceCreateInfo & setQueueCreateInfos(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const &
        queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
      pQueueCreateInfos    = queueCreateInfos_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount = enabledLayerCount_;
      return *this;
    }

    DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledLayerNames = ppEnabledLayerNames_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const &
                                                pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount   = static_cast<uint32_t>( pEnabledLayerNames_.size() );
      ppEnabledLayerNames = pEnabledLayerNames_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount = enabledExtensionCount_;
      return *this;
    }

    DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledExtensionNames = ppEnabledExtensionNames_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceCreateInfo & setPEnabledExtensionNames(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ )
      VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount   = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceCreateInfo &
      setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      pEnabledFeatures = pEnabledFeatures_;
      return *this;
    }

    operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceCreateInfo *>( this );
    }

    operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceCreateInfo const & ) const = default;
#else
    bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) &&
             ( enabledLayerCount == rhs.enabledLayerCount ) && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) &&
             ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
             ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ) && ( pEnabledFeatures == rhs.pEnabledFeatures );
    }

    bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType                   = StructureType::eDeviceCreateInfo;
    const void *                                         pNext                   = {};
    VULKAN_HPP_NAMESPACE::DeviceCreateFlags              flags                   = {};
    uint32_t                                             queueCreateInfoCount    = {};
    const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo *  pQueueCreateInfos       = {};
    uint32_t                                             enabledLayerCount       = {};
    const char * const *                                 ppEnabledLayerNames     = {};
    uint32_t                                             enabledExtensionCount   = {};
    const char * const *                                 ppEnabledExtensionNames = {};
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures        = {};
  };
  static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceCreateInfo>
  {
    using Type = DeviceCreateInfo;
  };
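
  // A minimal usage sketch (assuming enhanced mode, a valid vk::PhysicalDevice `physicalDevice`
  // and a suitable `queueFamilyIndex`): the ArrayProxy-based setters above fill the count and
  // pointer members from a single element or container in one call.
  //
  //   float                     queuePriority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueCreateInfo( {}, queueFamilyIndex, 1, &queuePriority );
  //   std::vector<char const *> extensions = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
  //   vk::DeviceCreateInfo      deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPEnabledExtensionNames( extensions );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );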

  struct DeviceDeviceMemoryReportCreateInfoEXT
  {
    static const bool                    allowDuplicate = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_           = {},
                                             PFN_vkDeviceMemoryReportCallbackEXT              pfnUserCallback_ = {},
                                             void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pfnUserCallback( pfnUserCallback_ )
      , pUserData( pUserData_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceDeviceMemoryReportCreateInfoEXT(
          *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT &
                            operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceDeviceMemoryReportCreateInfoEXT &
      operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
      return *this;
    }

    DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceDeviceMemoryReportCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DeviceDeviceMemoryReportCreateInfoEXT &
      setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
    {
      pfnUserCallback = pfnUserCallback_;
      return *this;
    }

    DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

    operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT *>( this );
    }

    operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const & ) const = default;
#else
    bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData );
    }

    bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
    const void *                                     pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
    PFN_vkDeviceMemoryReportCallbackEXT              pfnUserCallback = {};
    void *                                           pUserData       = {};
  };
  static_assert( sizeof( DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceDeviceMemoryReportCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
  {
    using Type = DeviceDeviceMemoryReportCreateInfoEXT;
  };
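
  // A hedged sketch (assuming VK_EXT_device_memory_report is enabled, a vk::DeviceCreateInfo
  // `deviceCreateInfo` being filled for device creation, and `myMemoryReportCallback`, a
  // user-provided function matching PFN_vkDeviceMemoryReportCallbackEXT): since allowDuplicate
  // is true, several of these structs may be chained into the same pNext chain, one per callback.
  //
  //   vk::DeviceDeviceMemoryReportCreateInfoEXT memoryReportInfo( {}, &myMemoryReportCallback );
  //   deviceCreateInfo.setPNext( &memoryReportInfo );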

  struct DeviceDiagnosticsConfigCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDeviceDiagnosticsConfigCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(
      VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV &
                            operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceDiagnosticsConfigCreateInfoNV &
      operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
      return *this;
    }

    DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceDiagnosticsConfigCreateInfoNV &
      setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
    }

    operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default;
#else
    bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
    }

    bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
    const void *                                         pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
  };
  static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceDiagnosticsConfigCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
  {
    using Type = DeviceDiagnosticsConfigCreateInfoNV;
  };
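
  // A brief sketch (assuming VK_NV_device_diagnostics_config is supported and requested, and a
  // vk::DeviceCreateInfo `deviceCreateInfo` being filled for device creation): the struct is
  // chained into the pNext chain to opt into NVIDIA device diagnostics at device-creation time.
  //
  //   vk::DeviceDiagnosticsConfigCreateInfoNV diagnosticsConfig(
  //     vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo |
  //     vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking );
  //   deviceCreateInfo.setPNext( &diagnosticsConfig );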

  struct DeviceEventInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ =
                            VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
      : deviceEvent( deviceEvent_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT &
                            operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
      return *this;
    }

    DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceEvent = deviceEvent_;
      return *this;
    }

    operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceEventInfoEXT *>( this );
    }

    operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceEventInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceEventInfoEXT const & ) const = default;
#else
    bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent );
    }

    bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType       = StructureType::eDeviceEventInfoEXT;
    const void *                             pNext       = {};
    VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
  };
  static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
  {
    using Type = DeviceEventInfoEXT;
  };
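
  // A short sketch (assuming VK_EXT_display_control is enabled and a vk::Device `device`):
  // registering a display-hotplug event yields a fence that is signaled when the event occurs.
  //
  //   vk::DeviceEventInfoEXT deviceEventInfo( vk::DeviceEventTypeEXT::eDisplayHotplug );
  //   vk::Fence              hotplugFence = device.registerEventEXT( deviceEventInfo );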

  struct DeviceGroupBindSparseInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {},
                                                    uint32_t memoryDeviceIndex_   = {} ) VULKAN_HPP_NOEXCEPT
      : resourceDeviceIndex( resourceDeviceIndex_ )
      , memoryDeviceIndex( memoryDeviceIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo &
                            operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
      return *this;
    }

    DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      resourceDeviceIndex = resourceDeviceIndex_;
      return *this;
    }

    DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryDeviceIndex = memoryDeviceIndex_;
      return *this;
    }

    operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupBindSparseInfo *>( this );
    }

    operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupBindSparseInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default;
#else
    bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) &&
             ( memoryDeviceIndex == rhs.memoryDeviceIndex );
    }

    bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eDeviceGroupBindSparseInfo;
    const void *                        pNext               = {};
    uint32_t                            resourceDeviceIndex = {};
    uint32_t                            memoryDeviceIndex   = {};
  };
  static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
  {
    using Type = DeviceGroupBindSparseInfo;
  };
  using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;

  struct DeviceGroupCommandBufferBeginInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDeviceGroupCommandBufferBeginInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceMask( deviceMask_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo &
                            operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
      return *this;
    }

    DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }

    operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo *>( this );
    }

    operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default;
#else
    bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask );
    }

    bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::eDeviceGroupCommandBufferBeginInfo;
    const void *                        pNext      = {};
    uint32_t                            deviceMask = {};
  };
  static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
  {
    using Type = DeviceGroupCommandBufferBeginInfo;
  };
  using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
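
  // A brief sketch (assuming a device group with two physical devices and a recording-ready
  // vk::CommandBuffer `commandBuffer`): deviceMask restricts the recorded commands to the
  // devices whose bits are set.
  //
  //   vk::DeviceGroupCommandBufferBeginInfo deviceGroupBeginInfo( 0x3 );  // devices 0 and 1
  //   vk::CommandBufferBeginInfo            beginInfo;
  //   beginInfo.setPNext( &deviceGroupBeginInfo );
  //   commandBuffer.begin( beginInfo );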

  class DisplayKHR
  {
  public:
    using CType = VkDisplayKHR;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;

  public:
    VULKAN_HPP_CONSTEXPR         DisplayKHR() = default;
    VULKAN_HPP_CONSTEXPR         DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
    {
      m_displayKHR = displayKHR;
      return *this;
    }
#endif

    DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_displayKHR = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayKHR const & ) const = default;
#else
    bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_displayKHR == rhs.m_displayKHR;
    }

    bool operator!=( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_displayKHR != rhs.m_displayKHR;
    }

    bool operator<( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_displayKHR < rhs.m_displayKHR;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
    {
      return m_displayKHR;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_displayKHR != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_displayKHR == VK_NULL_HANDLE;
    }

  private:
    VkDisplayKHR m_displayKHR = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDisplayKHR>
  {
    using type = VULKAN_HPP_NAMESPACE::DisplayKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayKHR>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
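
  // A small sketch of the handle semantics shared by the wrapper handles (assuming
  // VK_KHR_display is enabled and a vk::PhysicalDevice `physicalDevice`): a default-constructed
  // DisplayKHR compares equal to VK_NULL_HANDLE until a real handle is assigned to it.
  //
  //   vk::DisplayKHR display;  // null handle, so !display is true
  //   auto planeProperties = physicalDevice.getDisplayPlanePropertiesKHR();
  //   if ( !planeProperties.empty() )
  //   {
  //     display = planeProperties[0].currentDisplay;
  //   }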

  struct PerformanceConfigurationAcquireInfoINTEL
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePerformanceConfigurationAcquireInfoINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(
      VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ =
        VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated )
      VULKAN_HPP_NOEXCEPT : type( type_ )
    {}

    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(
      PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PerformanceConfigurationAcquireInfoINTEL(
          *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL &
                            operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceConfigurationAcquireInfoINTEL &
      operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
      return *this;
    }

    PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PerformanceConfigurationAcquireInfoINTEL &
      setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( this );
    }

    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default;
#else
    bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type );
    }

    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type =
      VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
  };
  static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) ==
                   sizeof( VkPerformanceConfigurationAcquireInfoINTEL ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
  {
    using Type = PerformanceConfigurationAcquireInfoINTEL;
  };

  class PerformanceConfigurationINTEL
  {
  public:
    using CType = VkPerformanceConfigurationINTEL;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

  public:
    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default;
    VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT
      PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
      : m_performanceConfigurationINTEL( performanceConfigurationINTEL )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    PerformanceConfigurationINTEL &
      operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
    {
      m_performanceConfigurationINTEL = performanceConfigurationINTEL;
      return *this;
    }
#endif

    PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_performanceConfigurationINTEL = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceConfigurationINTEL const & ) const = default;
#else
    bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL;
    }

    bool operator!=( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL;
    }

    bool operator<( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
    {
      return m_performanceConfigurationINTEL;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_performanceConfigurationINTEL != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_performanceConfigurationINTEL == VK_NULL_HANDLE;
    }

  private:
    VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) ==
                   sizeof( VkPerformanceConfigurationINTEL ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePerformanceConfigurationINTEL>
  {
    using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL>
  {
    using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
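
  // A hedged sketch of VK_INTEL_performance_query usage (assuming the extension is enabled
  // and a vk::Device `device` plus a vk::Queue `queue` are available): the acquire-info struct
  // above selects the configuration type, and the returned handle is bound to a queue.
  //
  //   vk::PerformanceConfigurationAcquireInfoINTEL acquireInfo(
  //     vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated );
  //   vk::PerformanceConfigurationINTEL configuration =
  //     device.acquirePerformanceConfigurationINTEL( acquireInfo );
  //   queue.setPerformanceConfigurationINTEL( configuration );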

  struct MemoryAllocateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {},
                                             uint32_t memoryTypeIndex_                        = {} ) VULKAN_HPP_NOEXCEPT
      : allocationSize( allocationSize_ )
      , memoryTypeIndex( memoryTypeIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo &
                            operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
      return *this;
    }

    MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
    {
      allocationSize = allocationSize_;
      return *this;
    }

    MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryTypeIndex = memoryTypeIndex_;
      return *this;
    }

    operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryAllocateInfo *>( this );
    }

    operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) &&
             ( memoryTypeIndex == rhs.memoryTypeIndex );
    }

    bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eMemoryAllocateInfo;
    const void *                        pNext           = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    allocationSize  = {};
    uint32_t                            memoryTypeIndex = {};
  };
  static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
  {
    using Type = MemoryAllocateInfo;
  };
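
  // An illustrative sketch (assuming a vk::Device `device`, a vk::Buffer `buffer`, and an
  // application-defined helper `findMemoryType` that picks a memory type index): allocationSize
  // and memoryTypeIndex are the only two fields the core allocation path needs.
  //
  //   vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( buffer );
  //   vk::MemoryAllocateInfo allocateInfo(
  //     requirements.size,
  //     findMemoryType( requirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal ) );
  //   vk::DeviceMemory memory = device.allocateMemory( allocateInfo );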

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoBindMemoryKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBindMemoryKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( uint32_t                           memoryBindIndex_ = {},
                                             VULKAN_HPP_NAMESPACE::DeviceMemory memory_          = {},
                                             VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset_    = {},
                                             VULKAN_HPP_NAMESPACE::DeviceSize   memorySize_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryBindIndex( memoryBindIndex_ )
      , memory( memory_ )
      , memoryOffset( memoryOffset_ )
      , memorySize( memorySize_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoBindMemoryKHR( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoBindMemoryKHR( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoBindMemoryKHR( *reinterpret_cast<VideoBindMemoryKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoBindMemoryKHR &
                            operator=( VideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoBindMemoryKHR & operator=( VkVideoBindMemoryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR const *>( &rhs );
      return *this;
    }

    VideoBindMemoryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoBindMemoryKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryBindIndex = memoryBindIndex_;
      return *this;
    }

    VideoBindMemoryKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    VideoBindMemoryKHR & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryOffset = memoryOffset_;
      return *this;
    }

    VideoBindMemoryKHR & setMemorySize( VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ ) VULKAN_HPP_NOEXCEPT
    {
      memorySize = memorySize_;
      return *this;
    }

    operator VkVideoBindMemoryKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoBindMemoryKHR *>( this );
    }

    operator VkVideoBindMemoryKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoBindMemoryKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoBindMemoryKHR const & ) const = default;
#  else
    bool operator==( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) &&
             ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize );
    }

    bool operator!=( VideoBindMemoryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eVideoBindMemoryKHR;
    const void *                        pNext           = {};
    uint32_t                            memoryBindIndex = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory  memory          = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    memoryOffset    = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    memorySize      = {};
  };
  static_assert( sizeof( VideoBindMemoryKHR ) == sizeof( VkVideoBindMemoryKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoBindMemoryKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoBindMemoryKHR>
  {
    using Type = VideoBindMemoryKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  class DeferredOperationKHR
  {
  public:
    using CType = VkDeferredOperationKHR;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

  public:
    VULKAN_HPP_CONSTEXPR         DeferredOperationKHR() = default;
    VULKAN_HPP_CONSTEXPR         DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT
      : m_deferredOperationKHR( deferredOperationKHR )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT
    {
      m_deferredOperationKHR = deferredOperationKHR;
      return *this;
    }
#endif

    DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_deferredOperationKHR = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeferredOperationKHR const & ) const = default;
#else
    bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_deferredOperationKHR == rhs.m_deferredOperationKHR;
    }

    bool operator!=( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_deferredOperationKHR != rhs.m_deferredOperationKHR;
    }

    bool operator<( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_deferredOperationKHR < rhs.m_deferredOperationKHR;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT
    {
      return m_deferredOperationKHR;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_deferredOperationKHR != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_deferredOperationKHR == VK_NULL_HANDLE;
    }

  private:
    VkDeferredOperationKHR m_deferredOperationKHR = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDeferredOperationKHR>
  {
    using type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
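
  // A tentative sketch (assuming VK_KHR_deferred_host_operations is enabled and a vk::Device
  // `device`): the handle is created empty and then handed to an operation that supports
  // deferral, e.g. an acceleration-structure build.
  //
  //   vk::DeferredOperationKHR deferredOperation = device.createDeferredOperationKHR();
  //   // ... pass deferredOperation to a deferrable command, join, and poll its result ...
  //   device.destroyDeferredOperationKHR( deferredOperation );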

  class PipelineCache
  {
  public:
    using CType = VkPipelineCache;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;

  public:
    VULKAN_HPP_CONSTEXPR         PipelineCache() = default;
    VULKAN_HPP_CONSTEXPR         PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
      : m_pipelineCache( pipelineCache )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    PipelineCache & operator=( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
    {
      m_pipelineCache = pipelineCache;
      return *this;
    }
#endif

    PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_pipelineCache = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCache const & ) const = default;
#else
    bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineCache == rhs.m_pipelineCache;
    }

    bool operator!=( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineCache != rhs.m_pipelineCache;
    }

    bool operator<( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineCache < rhs.m_pipelineCache;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineCache;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineCache != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_pipelineCache == VK_NULL_HANDLE;
    }

  private:
    VkPipelineCache m_pipelineCache = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePipelineCache>
  {
    using type = VULKAN_HPP_NAMESPACE::PipelineCache;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache>
  {
    using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache>
  {
    using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineCache>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
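
  // A short sketch (assuming a vk::Device `device` and `initialData`, a byte vector previously
  // written to disk by the application, possibly empty): a pipeline cache is seeded from the
  // old data and its new contents can be read back for the next run.
  //
  //   vk::PipelineCacheCreateInfo cacheCreateInfo( {}, initialData.size(), initialData.data() );
  //   vk::PipelineCache           pipelineCache = device.createPipelineCache( cacheCreateInfo );
  //   std::vector<uint8_t>        cacheData     = device.getPipelineCacheData( pipelineCache );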

  struct EventCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : EventCreateInfo( *reinterpret_cast<EventCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>( &rhs );
      return *this;
    }

    EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkEventCreateInfo *>( this );
    }

    operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkEventCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( EventCreateInfo const & ) const = default;
#else
    bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
    }

    bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::eEventCreateInfo;
    const void *                           pNext = {};
    VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
  };
  static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<EventCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eEventCreateInfo>
  {
    using Type = EventCreateInfo;
  };
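
  // A brief sketch (assuming a vk::Device `device`): events are almost always created with
  // empty flags and then signaled from the host or from a command buffer.
  //
  //   vk::Event event = device.createEvent( vk::EventCreateInfo() );
  //   device.setEvent( event );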

  struct FenceCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
      return *this;
    }

    FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFenceCreateInfo *>( this );
    }

    operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFenceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FenceCreateInfo const & ) const = default;
#else
    bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
    }

    bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::eFenceCreateInfo;
    const void *                           pNext = {};
    VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
  };
  static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFenceCreateInfo>
  {
    using Type = FenceCreateInfo;
  };
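
  // A short sketch (assuming a vk::Device `device`): creating the fence already signaled
  // avoids special-casing the first frame of a typical frame-synchronization loop.
  //
  //   vk::FenceCreateInfo fenceCreateInfo( vk::FenceCreateFlagBits::eSignaled );
  //   vk::Fence           inFlightFence = device.createFence( fenceCreateInfo );
  //   // per frame: device.waitForFences( inFlightFence, VK_TRUE, UINT64_MAX );
  //   //            device.resetFences( inFlightFence );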

  struct FramebufferCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_           = {},
                                                VULKAN_HPP_NAMESPACE::RenderPass             renderPass_      = {},
                                                uint32_t                                     attachmentCount_ = {},
                                                const VULKAN_HPP_NAMESPACE::ImageView *      pAttachments_    = {},
                                                uint32_t                                     width_           = {},
                                                uint32_t                                     height_          = {},
                                                uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , renderPass( renderPass_ )
      , attachmentCount( attachmentCount_ )
      , pAttachments( pAttachments_ )
      , width( width_ )
      , height( height_ )
      , layers( layers_ )
    {}

    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : FramebufferCreateInfo( *reinterpret_cast<FramebufferCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferCreateInfo(
      VULKAN_HPP_NAMESPACE::FramebufferCreateFlags                                                 flags_,
      VULKAN_HPP_NAMESPACE::RenderPass                                                             renderPass_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_,
      uint32_t                                                                                     width_  = {},
      uint32_t                                                                                     height_ = {},
      uint32_t                                                                                     layers_ = {} )
      : flags( flags_ )
      , renderPass( renderPass_ )
      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , width( width_ )
      , height( height_ )
      , layers( layers_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo &
                            operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>( &rhs );
      return *this;
    }

    FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferCreateInfo & setAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
      VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
    {
      layers = layers_;
      return *this;
    }

    operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFramebufferCreateInfo *>( this );
    }

    operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFramebufferCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FramebufferCreateInfo const & ) const = default;
#else
    bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( renderPass == rhs.renderPass ) && ( attachmentCount == rhs.attachmentCount ) &&
             ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) &&
             ( layers == rhs.layers );
    }

    bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType           = StructureType::eFramebufferCreateInfo;
    const void *                                 pNext           = {};
    VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags           = {};
    VULKAN_HPP_NAMESPACE::RenderPass             renderPass      = {};
    uint32_t                                     attachmentCount = {};
    const VULKAN_HPP_NAMESPACE::ImageView *      pAttachments    = {};
    uint32_t                                     width           = {};
    uint32_t                                     height          = {};
    uint32_t                                     layers          = {};
  };
  static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
  {
    using Type = FramebufferCreateInfo;
  };
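
  // A hedged sketch (assuming a vk::Device `device`, a compatible vk::RenderPass `renderPass`,
  // a swapchain vk::ImageView `colorView` and its vk::Extent2D `extent`): the ArrayProxy
  // constructor above derives attachmentCount and pAttachments from the container.
  //
  //   std::array<vk::ImageView, 1> attachments = { colorView };
  //   vk::FramebufferCreateInfo    framebufferCreateInfo(
  //     {}, renderPass, attachments, extent.width, extent.height, 1 );
  //   vk::Framebuffer framebuffer = device.createFramebuffer( framebufferCreateInfo );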

  struct VertexInputBindingDescription
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VertexInputBindingDescription( uint32_t                              binding_ = {},
                                     uint32_t                              stride_  = {},
                                     VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ =
                                       VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
      : binding( binding_ )
      , stride( stride_ )
      , inputRate( inputRate_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputBindingDescription( *reinterpret_cast<VertexInputBindingDescription const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription &
                            operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>( &rhs );
      return *this;
    }

    VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
    {
      stride = stride_;
      return *this;
    }

    VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
    {
      inputRate = inputRate_;
      return *this;
    }

    operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputBindingDescription *>( this );
    }

    operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputBindingDescription *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputBindingDescription const & ) const = default;
#else
    bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate );
    }

    bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                              binding   = {};
    uint32_t                              stride    = {};
    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
  };
  static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VertexInputBindingDescription>::value,
                 "struct wrapper is not a standard layout!" );

  struct VertexInputAttributeDescription
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VertexInputAttributeDescription( uint32_t                     location_ = {},
                                       uint32_t                     binding_  = {},
                                       VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                                       uint32_t                     offset_ = {} ) VULKAN_HPP_NOEXCEPT
      : location( location_ )
      , binding( binding_ )
      , format( format_ )
      , offset( offset_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription &
                            operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>( &rhs );
      return *this;
    }

    VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
    {
      location = location_;
      return *this;
    }

    VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputAttributeDescription *>( this );
    }

    operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputAttributeDescription *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputAttributeDescription const & ) const = default;
#else
    bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) &&
             ( offset == rhs.offset );
    }

    bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                     location = {};
    uint32_t                     binding  = {};
    VULKAN_HPP_NAMESPACE::Format format   = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    uint32_t                     offset   = {};
  };
  static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VertexInputAttributeDescription>::value,
                 "struct wrapper is not a standard layout!" );

  struct PipelineVertexInputStateCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineVertexInputStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags     flags_                           = {},
      uint32_t                                                      vertexBindingDescriptionCount_   = {},
      const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription *   pVertexBindingDescriptions_      = {},
      uint32_t                                                      vertexAttributeDescriptionCount_ = {},
      const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_    = {} )
      VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
      , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
      , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
      , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineVertexInputStateCreateInfo( *reinterpret_cast<PipelineVertexInputStateCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineVertexInputStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const &
        vertexBindingDescriptions_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const &
        vertexAttributeDescriptions_ = {} )
      : flags( flags_ )
      , vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) )
      , pVertexBindingDescriptions( vertexBindingDescriptions_.data() )
      , vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) )
      , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo &
                            operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineVertexInputStateCreateInfo &
      operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineVertexInputStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineVertexInputStateCreateInfo &
      setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
      return *this;
    }

    PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions(
      const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexBindingDescriptions = pVertexBindingDescriptions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const &
        vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
      pVertexBindingDescriptions    = vertexBindingDescriptions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineVertexInputStateCreateInfo &
      setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
      return *this;
    }

    PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions(
      const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const &
        vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
      pVertexAttributeDescriptions    = vertexAttributeDescriptions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo *>( this );
    }

    operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) &&
             ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) &&
             ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) &&
             ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
    }

    bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags     flags                           = {};
    uint32_t                                                      vertexBindingDescriptionCount   = {};
    const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription *   pVertexBindingDescriptions      = {};
    uint32_t                                                      vertexAttributeDescriptionCount = {};
    const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions    = {};
  };
  static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineVertexInputStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
  {
    using Type = PipelineVertexInputStateCreateInfo;
  };
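
  // Illustrative sketch (not part of the generated interface): when VULKAN_HPP_DISABLE_ENHANCED_MODE
  // is not defined, the ArrayProxyNoTemporaries overloads derive the counts from the containers, so
  // the explicit *Count setters can be skipped. The vectors below are hypothetical.
  //   std::vector<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>   bindings   = { /* ... */ };
  //   std::vector<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> attributes = { /* ... */ };
  //   auto vertexInputState = VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo{}
  //                             .setVertexBindingDescriptions( bindings )
  //                             .setVertexAttributeDescriptions( attributes );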

  struct PipelineInputAssemblyStateCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineInputAssemblyStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {},
      VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList,
      VULKAN_HPP_NAMESPACE::Bool32            primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , topology( topology_ )
      , primitiveRestartEnable( primitiveRestartEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast<PipelineInputAssemblyStateCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo &
                            operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineInputAssemblyStateCreateInfo &
      operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineInputAssemblyStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineInputAssemblyStateCreateInfo &
      setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
    {
      topology = topology_;
      return *this;
    }

    PipelineInputAssemblyStateCreateInfo &
      setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveRestartEnable = primitiveRestartEnable_;
      return *this;
    }

    operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo *>( this );
    }

    operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( topology == rhs.topology ) && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
    }

    bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
    VULKAN_HPP_NAMESPACE::Bool32            primitiveRestartEnable = {};
  };
  static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineInputAssemblyStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
  {
    using Type = PipelineInputAssemblyStateCreateInfo;
  };
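
  // Illustrative sketch (not part of the generated interface): a typical triangle-list setup.
  //   auto inputAssemblyState =
  //     VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo{}
  //       .setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology::eTriangleList )
  //       .setPrimitiveRestartEnable( VK_FALSE );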

  struct PipelineTessellationStateCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineTessellationStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {},
                                           uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , patchControlPoints( patchControlPoints_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineTessellationStateCreateInfo( *reinterpret_cast<PipelineTessellationStateCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo &
                            operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineTessellationStateCreateInfo &
      operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineTessellationStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
    {
      patchControlPoints = patchControlPoints_;
      return *this;
    }

    operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineTessellationStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( patchControlPoints == rhs.patchControlPoints );
    }

    bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags              = {};
    uint32_t                                                   patchControlPoints = {};
  };
  static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineTessellationStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
  {
    using Type = PipelineTessellationStateCreateInfo;
  };
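
  // Illustrative sketch (not part of the generated interface): usually only patchControlPoints
  // needs to be set; the value 3 below (triangle patches) is hypothetical.
  //   auto tessellationState =
  //     VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo{}.setPatchControlPoints( 3 );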

  struct PipelineViewportStateCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_         = {},
                                       uint32_t                                               viewportCount_ = {},
                                       const VULKAN_HPP_NAMESPACE::Viewport *                 pViewports_    = {},
                                       uint32_t                                               scissorCount_  = {},
                                       const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , viewportCount( viewportCount_ )
      , pViewports( pViewports_ )
      , scissorCount( scissorCount_ )
      , pScissors( pScissors_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportStateCreateInfo( *reinterpret_cast<PipelineViewportStateCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags                                      flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const &   scissors_ = {} )
      : flags( flags_ )
      , viewportCount( static_cast<uint32_t>( viewports_.size() ) )
      , pViewports( viewports_.data() )
      , scissorCount( static_cast<uint32_t>( scissors_.size() ) )
      , pScissors( scissors_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo &
                            operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineViewportStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = viewportCount_;
      return *this;
    }

    PipelineViewportStateCreateInfo &
      setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewports = pViewports_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportStateCreateInfo & setViewports(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ )
      VULKAN_HPP_NOEXCEPT
    {
      viewportCount = static_cast<uint32_t>( viewports_.size() );
      pViewports    = viewports_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      scissorCount = scissorCount_;
      return *this;
    }

    PipelineViewportStateCreateInfo &
      setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT
    {
      pScissors = pScissors_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportStateCreateInfo &
      setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ )
        VULKAN_HPP_NOEXCEPT
    {
      scissorCount = static_cast<uint32_t>( scissors_.size() );
      pScissors    = scissors_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportStateCreateInfo *>( this );
    }

    operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( viewportCount == rhs.viewportCount ) && ( pViewports == rhs.pViewports ) &&
             ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors );
    }

    bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::ePipelineViewportStateCreateInfo;
    const void *                                           pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
    uint32_t                                               viewportCount = {};
    const VULKAN_HPP_NAMESPACE::Viewport *                 pViewports    = {};
    uint32_t                                               scissorCount  = {};
    const VULKAN_HPP_NAMESPACE::Rect2D *                   pScissors     = {};
  };
  static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineViewportStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
  {
    using Type = PipelineViewportStateCreateInfo;
  };
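
  // Illustrative sketch (not part of the generated interface): in enhanced mode the setViewports /
  // setScissors overloads update viewportCount / scissorCount from the proxies. The viewport and
  // scissor extents below are hypothetical.
  //   VULKAN_HPP_NAMESPACE::Viewport viewport( 0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f );
  //   VULKAN_HPP_NAMESPACE::Rect2D   scissor( { 0, 0 }, { 640, 480 } );
  //   auto viewportState = VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo{}
  //                          .setViewports( viewport )
  //                          .setScissors( scissor );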

  struct PipelineRasterizationStateCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineRasterizationStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32                                depthClampEnable_        = {},
      VULKAN_HPP_NAMESPACE::Bool32                                rasterizerDiscardEnable_ = {},
      VULKAN_HPP_NAMESPACE::PolygonMode   polygonMode_             = VULKAN_HPP_NAMESPACE::PolygonMode::eFill,
      VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_                = {},
      VULKAN_HPP_NAMESPACE::FrontFace     frontFace_               = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise,
      VULKAN_HPP_NAMESPACE::Bool32        depthBiasEnable_         = {},
      float                               depthBiasConstantFactor_ = {},
      float                               depthBiasClamp_          = {},
      float                               depthBiasSlopeFactor_    = {},
      float                               lineWidth_               = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , depthClampEnable( depthClampEnable_ )
      , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
      , polygonMode( polygonMode_ )
      , cullMode( cullMode_ )
      , frontFace( frontFace_ )
      , depthBiasEnable( depthBiasEnable_ )
      , depthBiasConstantFactor( depthBiasConstantFactor_ )
      , depthBiasClamp( depthBiasClamp_ )
      , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
      , lineWidth( lineWidth_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationStateCreateInfo( *reinterpret_cast<PipelineRasterizationStateCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo &
                            operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationStateCreateInfo &
      operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo &
      setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClampEnable = depthClampEnable_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo &
      setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizerDiscardEnable = rasterizerDiscardEnable_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo &
      setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
    {
      polygonMode = polygonMode_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo &
      setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
    {
      cullMode = cullMode_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo &
      setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
    {
      frontFace = frontFace_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo &
      setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasEnable = depthBiasEnable_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo &
      setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasConstantFactor = depthBiasConstantFactor_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasClamp = depthBiasClamp_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBiasSlopeFactor = depthBiasSlopeFactor_;
      return *this;
    }

    PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
    {
      lineWidth = lineWidth_;
      return *this;
    }

    operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo *>( this );
    }

    operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( depthClampEnable == rhs.depthClampEnable ) &&
             ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) &&
             ( cullMode == rhs.cullMode ) && ( frontFace == rhs.frontFace ) &&
             ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) &&
             ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) &&
             ( lineWidth == rhs.lineWidth );
    }

    bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags                   = {};
    VULKAN_HPP_NAMESPACE::Bool32                                depthClampEnable        = {};
    VULKAN_HPP_NAMESPACE::Bool32                                rasterizerDiscardEnable = {};
    VULKAN_HPP_NAMESPACE::PolygonMode                           polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
    VULKAN_HPP_NAMESPACE::CullModeFlags                         cullMode    = {};
    VULKAN_HPP_NAMESPACE::FrontFace frontFace               = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
    VULKAN_HPP_NAMESPACE::Bool32    depthBiasEnable         = {};
    float                           depthBiasConstantFactor = {};
    float                           depthBiasClamp          = {};
    float                           depthBiasSlopeFactor    = {};
    float                           lineWidth               = {};
  };
  static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
  {
    using Type = PipelineRasterizationStateCreateInfo;
  };
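
  // Illustrative sketch (not part of the generated interface): a common fill-mode setup with
  // back-face culling; note that lineWidth defaults to 0 above and is typically set to 1.0f.
  //   auto rasterizationState = VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo{}
  //                               .setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode::eFill )
  //                               .setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlagBits::eBack )
  //                               .setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise )
  //                               .setLineWidth( 1.0f );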

  struct PipelineMultisampleStateCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineMultisampleStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {},
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_  = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
      VULKAN_HPP_NAMESPACE::Bool32              sampleShadingEnable_   = {},
      float                                     minSampleShading_      = {},
      const VULKAN_HPP_NAMESPACE::SampleMask *  pSampleMask_           = {},
      VULKAN_HPP_NAMESPACE::Bool32              alphaToCoverageEnable_ = {},
      VULKAN_HPP_NAMESPACE::Bool32              alphaToOneEnable_      = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , rasterizationSamples( rasterizationSamples_ )
      , sampleShadingEnable( sampleShadingEnable_ )
      , minSampleShading( minSampleShading_ )
      , pSampleMask( pSampleMask_ )
      , alphaToCoverageEnable( alphaToCoverageEnable_ )
      , alphaToOneEnable( alphaToOneEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineMultisampleStateCreateInfo( *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo &
                            operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineMultisampleStateCreateInfo &
      operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineMultisampleStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineMultisampleStateCreateInfo &
      setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationSamples = rasterizationSamples_;
      return *this;
    }

    PipelineMultisampleStateCreateInfo &
      setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleShadingEnable = sampleShadingEnable_;
      return *this;
    }

    PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
    {
      minSampleShading = minSampleShading_;
      return *this;
    }

    PipelineMultisampleStateCreateInfo &
      setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT
    {
      pSampleMask = pSampleMask_;
      return *this;
    }

    PipelineMultisampleStateCreateInfo &
      setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaToCoverageEnable = alphaToCoverageEnable_;
      return *this;
    }

    PipelineMultisampleStateCreateInfo &
      setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaToOneEnable = alphaToOneEnable_;
      return *this;
    }

    operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo *>( this );
    }

    operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( rasterizationSamples == rhs.rasterizationSamples ) &&
             ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) &&
             ( pSampleMask == rhs.pSampleMask ) && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) &&
             ( alphaToOneEnable == rhs.alphaToOneEnable );
    }

    bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples  = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::Bool32              sampleShadingEnable   = {};
    float                                     minSampleShading      = {};
    const VULKAN_HPP_NAMESPACE::SampleMask *  pSampleMask           = {};
    VULKAN_HPP_NAMESPACE::Bool32              alphaToCoverageEnable = {};
    VULKAN_HPP_NAMESPACE::Bool32              alphaToOneEnable      = {};
  };
  static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineMultisampleStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
  {
    using Type = PipelineMultisampleStateCreateInfo;
  };
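
  // Illustrative sketch (not part of the generated interface): disabling multisampling by requesting
  // a single sample per pixel, which also matches the member defaults above.
  //   auto multisampleState = VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo{}
  //                             .setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 );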

  struct StencilOpState
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_      = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
                      VULKAN_HPP_NAMESPACE::StencilOp passOp_      = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
                      VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
                      VULKAN_HPP_NAMESPACE::CompareOp compareOp_   = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
                      uint32_t                        compareMask_ = {},
                      uint32_t                        writeMask_   = {},
                      uint32_t                        reference_   = {} ) VULKAN_HPP_NOEXCEPT
      : failOp( failOp_ )
      , passOp( passOp_ )
      , depthFailOp( depthFailOp_ )
      , compareOp( compareOp_ )
      , compareMask( compareMask_ )
      , writeMask( writeMask_ )
      , reference( reference_ )
    {}

    VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
      : StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>( &rhs );
      return *this;
    }

    StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
    {
      failOp = failOp_;
      return *this;
    }

    StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
    {
      passOp = passOp_;
      return *this;
    }

    StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthFailOp = depthFailOp_;
      return *this;
    }

    StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
    {
      compareOp = compareOp_;
      return *this;
    }

    StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
    {
      compareMask = compareMask_;
      return *this;
    }

    StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
    {
      writeMask = writeMask_;
      return *this;
    }

    StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
    {
      reference = reference_;
      return *this;
    }

    operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkStencilOpState *>( this );
    }

    operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStencilOpState *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( StencilOpState const & ) const = default;
#else
    bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) &&
             ( compareOp == rhs.compareOp ) && ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) &&
             ( reference == rhs.reference );
    }

    bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StencilOp failOp      = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
    VULKAN_HPP_NAMESPACE::StencilOp passOp      = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
    VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
    VULKAN_HPP_NAMESPACE::CompareOp compareOp   = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
    uint32_t                        compareMask = {};
    uint32_t                        writeMask   = {};
    uint32_t                        reference   = {};
  };
  static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<StencilOpState>::value, "struct wrapper is not a standard layout!" );
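
  // Illustrative sketch (not part of the generated interface): a stencil state that always passes
  // and leaves the buffer unchanged, relying on the eKeep defaults for the ops. The masks are
  // hypothetical.
  //   auto stencilState = VULKAN_HPP_NAMESPACE::StencilOpState{}
  //                         .setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp::eAlways )
  //                         .setCompareMask( 0xFF )
  //                         .setWriteMask( 0xFF );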

  struct PipelineDepthStencilStateCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineDepthStencilStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_            = {},
      VULKAN_HPP_NAMESPACE::Bool32                               depthTestEnable_  = {},
      VULKAN_HPP_NAMESPACE::Bool32                               depthWriteEnable_ = {},
      VULKAN_HPP_NAMESPACE::CompareOp      depthCompareOp_        = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
      VULKAN_HPP_NAMESPACE::Bool32         depthBoundsTestEnable_ = {},
      VULKAN_HPP_NAMESPACE::Bool32         stencilTestEnable_     = {},
      VULKAN_HPP_NAMESPACE::StencilOpState front_                 = {},
      VULKAN_HPP_NAMESPACE::StencilOpState back_                  = {},
      float                                minDepthBounds_        = {},
      float                                maxDepthBounds_        = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , depthTestEnable( depthTestEnable_ )
      , depthWriteEnable( depthWriteEnable_ )
      , depthCompareOp( depthCompareOp_ )
      , depthBoundsTestEnable( depthBoundsTestEnable_ )
      , stencilTestEnable( stencilTestEnable_ )
      , front( front_ )
      , back( back_ )
      , minDepthBounds( minDepthBounds_ )
      , maxDepthBounds( maxDepthBounds_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
                            operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDepthStencilStateCreateInfo &
      operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo &
      setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthTestEnable = depthTestEnable_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo &
      setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthWriteEnable = depthWriteEnable_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo &
      setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
    {
      depthCompareOp = depthCompareOp_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo &
      setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthBoundsTestEnable = depthBoundsTestEnable_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo &
      setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilTestEnable = stencilTestEnable_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo &
      setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
    {
      front = front_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo &
      setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
    {
      back = back_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
    {
      minDepthBounds = minDepthBounds_;
      return *this;
    }

    PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
    {
      maxDepthBounds = maxDepthBounds_;
      return *this;
    }

    operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo *>( this );
    }

    operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( depthTestEnable == rhs.depthTestEnable ) && ( depthWriteEnable == rhs.depthWriteEnable ) &&
             ( depthCompareOp == rhs.depthCompareOp ) && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) &&
             ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && ( back == rhs.back ) &&
             ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds );
    }

    bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags            = {};
    VULKAN_HPP_NAMESPACE::Bool32                               depthTestEnable  = {};
    VULKAN_HPP_NAMESPACE::Bool32                               depthWriteEnable = {};
    VULKAN_HPP_NAMESPACE::CompareOp                            depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
    VULKAN_HPP_NAMESPACE::Bool32                               depthBoundsTestEnable = {};
    VULKAN_HPP_NAMESPACE::Bool32                               stencilTestEnable     = {};
    VULKAN_HPP_NAMESPACE::StencilOpState                       front                 = {};
    VULKAN_HPP_NAMESPACE::StencilOpState                       back                  = {};
    float                                                      minDepthBounds        = {};
    float                                                      maxDepthBounds        = {};
  };
  static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
  {
    using Type = PipelineDepthStencilStateCreateInfo;
  };
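
  // Illustrative sketch (not part of the generated interface): depth testing with writes enabled
  // and a less-or-equal comparison; setFront / setBack take the StencilOpState wrapper defined above.
  //   auto depthStencilState = VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo{}
  //                              .setDepthTestEnable( VK_TRUE )
  //                              .setDepthWriteEnable( VK_TRUE )
  //                              .setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp::eLessOrEqual );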

  struct PipelineColorBlendAttachmentState
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(
      VULKAN_HPP_NAMESPACE::Bool32              blendEnable_         = {},
      VULKAN_HPP_NAMESPACE::BlendFactor         srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
      VULKAN_HPP_NAMESPACE::BlendFactor         dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
      VULKAN_HPP_NAMESPACE::BlendOp             colorBlendOp_        = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
      VULKAN_HPP_NAMESPACE::BlendFactor         srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
      VULKAN_HPP_NAMESPACE::BlendFactor         dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
      VULKAN_HPP_NAMESPACE::BlendOp             alphaBlendOp_        = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
      VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_      = {} ) VULKAN_HPP_NOEXCEPT
      : blendEnable( blendEnable_ )
      , srcColorBlendFactor( srcColorBlendFactor_ )
      , dstColorBlendFactor( dstColorBlendFactor_ )
      , colorBlendOp( colorBlendOp_ )
      , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
      , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
      , alphaBlendOp( alphaBlendOp_ )
      , colorWriteMask( colorWriteMask_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendAttachmentState( *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState &
                            operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>( &rhs );
      return *this;
    }

    PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      blendEnable = blendEnable_;
      return *this;
    }

    PipelineColorBlendAttachmentState &
      setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      srcColorBlendFactor = srcColorBlendFactor_;
      return *this;
    }

    PipelineColorBlendAttachmentState &
      setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      dstColorBlendFactor = dstColorBlendFactor_;
      return *this;
    }

    PipelineColorBlendAttachmentState &
      setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
    {
      colorBlendOp = colorBlendOp_;
      return *this;
    }

    PipelineColorBlendAttachmentState &
      setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAlphaBlendFactor = srcAlphaBlendFactor_;
      return *this;
    }

    PipelineColorBlendAttachmentState &
      setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAlphaBlendFactor = dstAlphaBlendFactor_;
      return *this;
    }

    PipelineColorBlendAttachmentState &
      setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaBlendOp = alphaBlendOp_;
      return *this;
    }

    PipelineColorBlendAttachmentState &
      setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
    {
      colorWriteMask = colorWriteMask_;
      return *this;
    }

    operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState *>( this );
    }

    operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorBlendAttachmentState *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default;
#else
    bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) &&
             ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
             ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) &&
             ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask );
    }

    bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Bool32              blendEnable         = {};
    VULKAN_HPP_NAMESPACE::BlendFactor         srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
    VULKAN_HPP_NAMESPACE::BlendFactor         dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
    VULKAN_HPP_NAMESPACE::BlendOp             colorBlendOp        = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
    VULKAN_HPP_NAMESPACE::BlendFactor         srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
    VULKAN_HPP_NAMESPACE::BlendFactor         dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
    VULKAN_HPP_NAMESPACE::BlendOp             alphaBlendOp        = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
    VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask      = {};
  };
  static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value,
                 "struct wrapper is not a standard layout!" );

  struct PipelineColorBlendStateCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineColorBlendStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags        flags_         = {},
      VULKAN_HPP_NAMESPACE::Bool32                                    logicOpEnable_ = {},
      VULKAN_HPP_NAMESPACE::LogicOp                                   logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
      uint32_t                                                        attachmentCount_ = {},
      const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_    = {},
      std::array<float, 4> const &                                    blendConstants_  = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , logicOpEnable( logicOpEnable_ )
      , logicOp( logicOp_ )
      , attachmentCount( attachmentCount_ )
      , pAttachments( pAttachments_ )
      , blendConstants( blendConstants_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendStateCreateInfo( *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineColorBlendStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::Bool32                             logicOpEnable_,
      VULKAN_HPP_NAMESPACE::LogicOp                            logicOp_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_,
      std::array<float, 4> const &                                             blendConstants_ = {} )
      : flags( flags_ )
      , logicOpEnable( logicOpEnable_ )
      , logicOp( logicOp_ )
      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , blendConstants( blendConstants_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo &
                            operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineColorBlendStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineColorBlendStateCreateInfo &
      setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      logicOpEnable = logicOpEnable_;
      return *this;
    }

    PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
    {
      logicOp = logicOp_;
      return *this;
    }

    PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    PipelineColorBlendStateCreateInfo & setPAttachments(
      const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineColorBlendStateCreateInfo & setAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float, 4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
    {
      blendConstants = blendConstants_;
      return *this;
    }

    operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo *>( this );
    }

    operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( logicOpEnable == rhs.logicOpEnable ) && ( logicOp == rhs.logicOp ) &&
             ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
             ( blendConstants == rhs.blendConstants );
    }

    bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                      sType = StructureType::ePipelineColorBlendStateCreateInfo;
    const void *                                             pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
    VULKAN_HPP_NAMESPACE::Bool32                             logicOpEnable   = {};
    VULKAN_HPP_NAMESPACE::LogicOp                            logicOp         = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
    uint32_t                                                 attachmentCount = {};
    const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments   = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4>                  blendConstants = {};
  };
  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
  {
    using Type = PipelineColorBlendStateCreateInfo;
  };
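
  // Illustrative usage sketch (editorial comment, not generated from the registry): in enhanced
  // mode, setAttachments() takes an ArrayProxyNoTemporaries, so only attachmentCount and
  // pAttachments are stored and the attachment array must outlive the create-info. Assuming
  // 'blendAttachment' is the attachment state sketched in the comment above:
  //
  //   std::array<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState, 1> colorBlendAttachments = { blendAttachment };
  //   VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo colorBlendState =
  //     VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo{}
  //       .setLogicOpEnable( VK_FALSE )
  //       .setAttachments( colorBlendAttachments )
  //       .setBlendConstants( { 0.0f, 0.0f, 0.0f, 0.0f } );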

  struct PipelineDynamicStateCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_             = {},
      uint32_t                                              dynamicStateCount_ = {},
      const VULKAN_HPP_NAMESPACE::DynamicState *            pDynamicStates_    = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , dynamicStateCount( dynamicStateCount_ )
      , pDynamicStates( pDynamicStates_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineDynamicStateCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags                                           flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ )
      : flags( flags_ )
      , dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) )
      , pDynamicStates( dynamicStates_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo &
                            operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineDynamicStateCreateInfo &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicStateCount = dynamicStateCount_;
      return *this;
    }

    PipelineDynamicStateCreateInfo &
      setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
    {
      pDynamicStates = pDynamicStates_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineDynamicStateCreateInfo & setDynamicStates(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ )
      VULKAN_HPP_NOEXCEPT
    {
      dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
      pDynamicStates    = dynamicStates_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo *>( this );
    }

    operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDynamicStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( dynamicStateCount == rhs.dynamicStateCount ) && ( pDynamicStates == rhs.pDynamicStates );
    }

    bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType = StructureType::ePipelineDynamicStateCreateInfo;
    const void *                                          pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
    uint32_t                                              dynamicStateCount = {};
    const VULKAN_HPP_NAMESPACE::DynamicState *            pDynamicStates    = {};
  };
  static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineDynamicStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
  {
    using Type = PipelineDynamicStateCreateInfo;
  };
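
  // Illustrative usage sketch (editorial comment, not generated from the registry): the enhanced
  // setDynamicStates() overload derives dynamicStateCount and pDynamicStates from a container that
  // has to stay alive while the create-info is in use. The chosen dynamic states are example values:
  //
  //   std::array<VULKAN_HPP_NAMESPACE::DynamicState, 2> dynamicStates = {
  //     VULKAN_HPP_NAMESPACE::DynamicState::eViewport, VULKAN_HPP_NAMESPACE::DynamicState::eScissor };
  //   VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo dynamicState =
  //     VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo{}.setDynamicStates( dynamicStates );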

  struct GraphicsPipelineCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineCreateFlags                          flags_               = {},
      uint32_t                                                           stageCount_          = {},
      const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *        pStages_             = {},
      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *   pVertexInputState_   = {},
      const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *  pTessellationState_  = {},
      const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo *      pViewportState_      = {},
      const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
      const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo *   pMultisampleState_   = {},
      const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo *  pDepthStencilState_  = {},
      const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo *    pColorBlendState_    = {},
      const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *       pDynamicState_       = {},
      VULKAN_HPP_NAMESPACE::PipelineLayout                               layout_              = {},
      VULKAN_HPP_NAMESPACE::RenderPass                                   renderPass_          = {},
      uint32_t                                                           subpass_             = {},
      VULKAN_HPP_NAMESPACE::Pipeline                                     basePipelineHandle_  = {},
      int32_t                                                            basePipelineIndex_   = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , stageCount( stageCount_ )
      , pStages( pStages_ )
      , pVertexInputState( pVertexInputState_ )
      , pInputAssemblyState( pInputAssemblyState_ )
      , pTessellationState( pTessellationState_ )
      , pViewportState( pViewportState_ )
      , pRasterizationState( pRasterizationState_ )
      , pMultisampleState( pMultisampleState_ )
      , pDepthStencilState( pDepthStencilState_ )
      , pColorBlendState( pColorBlendState_ )
      , pDynamicState( pDynamicState_ )
      , layout( layout_ )
      , renderPass( renderPass_ )
      , subpass( subpass_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : GraphicsPipelineCreateInfo( *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GraphicsPipelineCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
                                                                         stages_,
      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *   pVertexInputState_   = {},
      const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *  pTessellationState_  = {},
      const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo *      pViewportState_      = {},
      const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
      const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo *   pMultisampleState_   = {},
      const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo *  pDepthStencilState_  = {},
      const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo *    pColorBlendState_    = {},
      const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *       pDynamicState_       = {},
      VULKAN_HPP_NAMESPACE::PipelineLayout                               layout_              = {},
      VULKAN_HPP_NAMESPACE::RenderPass                                   renderPass_          = {},
      uint32_t                                                           subpass_             = {},
      VULKAN_HPP_NAMESPACE::Pipeline                                     basePipelineHandle_  = {},
      int32_t                                                            basePipelineIndex_   = {} )
      : flags( flags_ )
      , stageCount( static_cast<uint32_t>( stages_.size() ) )
      , pStages( stages_.data() )
      , pVertexInputState( pVertexInputState_ )
      , pInputAssemblyState( pInputAssemblyState_ )
      , pTessellationState( pTessellationState_ )
      , pViewportState( pViewportState_ )
      , pRasterizationState( pRasterizationState_ )
      , pMultisampleState( pMultisampleState_ )
      , pDepthStencilState( pDepthStencilState_ )
      , pColorBlendState( pColorBlendState_ )
      , pDynamicState( pDynamicState_ )
      , layout( layout_ )
      , renderPass( renderPass_ )
      , subpass( subpass_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo &
                            operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>( &rhs );
      return *this;
    }

    GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = stageCount_;
      return *this;
    }

    GraphicsPipelineCreateInfo &
      setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pStages = pStages_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GraphicsPipelineCreateInfo & setStages(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
        stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast<uint32_t>( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GraphicsPipelineCreateInfo & setPVertexInputState(
      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexInputState = pVertexInputState_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setPInputAssemblyState(
      const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
    {
      pInputAssemblyState = pInputAssemblyState_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setPTessellationState(
      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
    {
      pTessellationState = pTessellationState_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setPViewportState(
      const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewportState = pViewportState_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setPRasterizationState(
      const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
    {
      pRasterizationState = pRasterizationState_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setPMultisampleState(
      const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
    {
      pMultisampleState = pMultisampleState_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setPDepthStencilState(
      const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthStencilState = pDepthStencilState_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setPColorBlendState(
      const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorBlendState = pColorBlendState_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setPDynamicState(
      const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      pDynamicState = pDynamicState_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
    {
      renderPass = renderPass_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
    {
      subpass = subpass_;
      return *this;
    }

    GraphicsPipelineCreateInfo &
      setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineHandle = basePipelineHandle_;
      return *this;
    }

    GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineIndex = basePipelineIndex_;
      return *this;
    }

    operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( this );
    }

    operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGraphicsPipelineCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default;
#else
    bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) &&
             ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) &&
             ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) &&
             ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) &&
             ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) &&
             ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) &&
             ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) &&
             ( basePipelineIndex == rhs.basePipelineIndex );
    }

    bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                              sType = StructureType::eGraphicsPipelineCreateInfo;
    const void *                                                     pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCreateFlags                        flags = {};
    uint32_t                                                         stageCount            = {};
    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *      pStages               = {};
    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState     = {};
    const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo *  pTessellationState  = {};
    const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo *      pViewportState      = {};
    const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {};
    const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo *   pMultisampleState   = {};
    const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo *  pDepthStencilState  = {};
    const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo *    pColorBlendState    = {};
    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *       pDynamicState       = {};
    VULKAN_HPP_NAMESPACE::PipelineLayout                               layout              = {};
    VULKAN_HPP_NAMESPACE::RenderPass                                   renderPass          = {};
    uint32_t                                                           subpass             = {};
    VULKAN_HPP_NAMESPACE::Pipeline                                     basePipelineHandle  = {};
    int32_t                                                            basePipelineIndex   = {};
  };
  static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<GraphicsPipelineCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
  {
    using Type = GraphicsPipelineCreateInfo;
  };
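
  // Illustrative usage sketch (editorial comment, not generated from the registry): a
  // GraphicsPipelineCreateInfo is typically assembled from the per-stage create-infos above and
  // handed to Device::createGraphicsPipeline. Here 'shaderStages', the pointed-to *State structs,
  // 'pipelineLayout', 'renderPass', 'pipelineCache' and 'device' are placeholders assumed to be
  // created elsewhere and to outlive the call:
  //
  //   VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo pipelineCreateInfo =
  //     VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo{}
  //       .setStages( shaderStages )   // enhanced mode: sets stageCount and pStages together
  //       .setPVertexInputState( &vertexInputState )
  //       .setPInputAssemblyState( &inputAssemblyState )
  //       .setPViewportState( &viewportState )
  //       .setPRasterizationState( &rasterizationState )
  //       .setPMultisampleState( &multisampleState )
  //       .setPColorBlendState( &colorBlendState )
  //       .setPDynamicState( &dynamicState )
  //       .setLayout( pipelineLayout )
  //       .setRenderPass( renderPass )
  //       .setSubpass( 0 );
  //   // In this header version createGraphicsPipeline returns a ResultValue, since pipeline
  //   // creation can report the non-error code ePipelineCompileRequiredEXT; .value holds the handle.
  //   VULKAN_HPP_NAMESPACE::Pipeline pipeline =
  //     device.createGraphicsPipeline( pipelineCache, pipelineCreateInfo ).value;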

  struct ImageCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageCreateInfo(
      VULKAN_HPP_NAMESPACE::ImageCreateFlags    flags_                 = {},
      VULKAN_HPP_NAMESPACE::ImageType           imageType_             = VULKAN_HPP_NAMESPACE::ImageType::e1D,
      VULKAN_HPP_NAMESPACE::Format              format_                = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::Extent3D            extent_                = {},
      uint32_t                                  mipLevels_             = {},
      uint32_t                                  arrayLayers_           = {},
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_               = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
      VULKAN_HPP_NAMESPACE::ImageTiling         tiling_                = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags     usage_                 = {},
      VULKAN_HPP_NAMESPACE::SharingMode         sharingMode_           = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
      uint32_t                                  queueFamilyIndexCount_ = {},
      const uint32_t *                          pQueueFamilyIndices_   = {},
      VULKAN_HPP_NAMESPACE::ImageLayout         initialLayout_         = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
      VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , imageType( imageType_ )
      , format( format_ )
      , extent( extent_ )
      , mipLevels( mipLevels_ )
      , arrayLayers( arrayLayers_ )
      , samples( samples_ )
      , tiling( tiling_ )
      , usage( usage_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( queueFamilyIndexCount_ )
      , pQueueFamilyIndices( pQueueFamilyIndices_ )
      , initialLayout( initialLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags                                flags_,
                     VULKAN_HPP_NAMESPACE::ImageType                                       imageType_,
                     VULKAN_HPP_NAMESPACE::Format                                          format_,
                     VULKAN_HPP_NAMESPACE::Extent3D                                        extent_,
                     uint32_t                                                              mipLevels_,
                     uint32_t                                                              arrayLayers_,
                     VULKAN_HPP_NAMESPACE::SampleCountFlagBits                             samples_,
                     VULKAN_HPP_NAMESPACE::ImageTiling                                     tiling_,
                     VULKAN_HPP_NAMESPACE::ImageUsageFlags                                 usage_,
                     VULKAN_HPP_NAMESPACE::SharingMode                                     sharingMode_,
                     VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
                     VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
      : flags( flags_ )
      , imageType( imageType_ )
      , format( format_ )
      , extent( extent_ )
      , mipLevels( mipLevels_ )
      , arrayLayers( arrayLayers_ )
      , samples( samples_ )
      , tiling( tiling_ )
      , usage( usage_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
      , pQueueFamilyIndices( queueFamilyIndices_.data() )
      , initialLayout( initialLayout_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
      return *this;
    }

    ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
    {
      imageType = imageType_;
      return *this;
    }

    ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLevels = mipLevels_;
      return *this;
    }

    ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
    {
      arrayLayers = arrayLayers_;
      return *this;
    }

    ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
    {
      samples = samples_;
      return *this;
    }

    ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }

    ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ImageCreateInfo & setQueueFamilyIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices   = queueFamilyIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      initialLayout = initialLayout_;
      return *this;
    }

    operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageCreateInfo *>( this );
    }

    operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageCreateInfo const & ) const = default;
#else
    bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( imageType == rhs.imageType ) && ( format == rhs.format ) && ( extent == rhs.extent ) &&
             ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) &&
             ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) &&
             ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
             ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( initialLayout == rhs.initialLayout );
    }

    bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType                 = StructureType::eImageCreateInfo;
    const void *                              pNext                 = {};
    VULKAN_HPP_NAMESPACE::ImageCreateFlags    flags                 = {};
    VULKAN_HPP_NAMESPACE::ImageType           imageType             = VULKAN_HPP_NAMESPACE::ImageType::e1D;
    VULKAN_HPP_NAMESPACE::Format              format                = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::Extent3D            extent                = {};
    uint32_t                                  mipLevels             = {};
    uint32_t                                  arrayLayers           = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples               = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::ImageTiling         tiling                = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
    VULKAN_HPP_NAMESPACE::ImageUsageFlags     usage                 = {};
    VULKAN_HPP_NAMESPACE::SharingMode         sharingMode           = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
    uint32_t                                  queueFamilyIndexCount = {};
    const uint32_t *                          pQueueFamilyIndices   = {};
    VULKAN_HPP_NAMESPACE::ImageLayout         initialLayout         = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  };
  static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageCreateInfo>
  {
    using Type = ImageCreateInfo;
  };
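
  // Illustrative usage sketch (editorial comment, not generated from the registry): an exclusive
  // 2D sampled image; 'device' is a placeholder VULKAN_HPP_NAMESPACE::Device assumed to exist, and
  // the format / extent / usage values are example choices only:
  //
  //   VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo =
  //     VULKAN_HPP_NAMESPACE::ImageCreateInfo{}
  //       .setImageType( VULKAN_HPP_NAMESPACE::ImageType::e2D )
  //       .setFormat( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm )
  //       .setExtent( { 1024, 1024, 1 } )
  //       .setMipLevels( 1 )
  //       .setArrayLayers( 1 )
  //       .setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 )
  //       .setTiling( VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal )
  //       .setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eSampled |
  //                  VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eTransferDst )
  //       .setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode::eExclusive )
  //       .setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined );
  //   VULKAN_HPP_NAMESPACE::Image image = device.createImage( imageCreateInfo );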

  struct ImageViewCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {},
                           VULKAN_HPP_NAMESPACE::Image                image_ = {},
                           VULKAN_HPP_NAMESPACE::ImageViewType    viewType_  = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
                           VULKAN_HPP_NAMESPACE::Format           format_    = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                           VULKAN_HPP_NAMESPACE::ComponentMapping components_            = {},
                           VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , image( image_ )
      , viewType( viewType_ )
      , format( format_ )
      , components( components_ )
      , subresourceRange( subresourceRange_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo &
                            operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>( &rhs );
      return *this;
    }

    ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
    {
      viewType = viewType_;
      return *this;
    }

    ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    ImageViewCreateInfo &
      setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
    {
      components = components_;
      return *this;
    }

    ImageViewCreateInfo &
      setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
    {
      subresourceRange = subresourceRange_;
      return *this;
    }

    operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewCreateInfo *>( this );
    }

    operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewCreateInfo const & ) const = default;
#else
    bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) &&
             ( viewType == rhs.viewType ) && ( format == rhs.format ) && ( components == rhs.components ) &&
             ( subresourceRange == rhs.subresourceRange );
    }

    bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType            = StructureType::eImageViewCreateInfo;
    const void *                                pNext            = {};
    VULKAN_HPP_NAMESPACE::ImageViewCreateFlags  flags            = {};
    VULKAN_HPP_NAMESPACE::Image                 image            = {};
    VULKAN_HPP_NAMESPACE::ImageViewType         viewType         = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
    VULKAN_HPP_NAMESPACE::Format                format           = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ComponentMapping      components       = {};
    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
  };
  static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageViewCreateInfo>
  {
    using Type = ImageViewCreateInfo;
  };
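
  // Illustrative usage sketch (editorial comment, not generated from the registry): a 2D color view
  // of the whole image sketched above; 'device' and 'image' are placeholders, and the identity
  // component mapping is kept by leaving setComponents() untouched:
  //
  //   VULKAN_HPP_NAMESPACE::ImageViewCreateInfo viewCreateInfo =
  //     VULKAN_HPP_NAMESPACE::ImageViewCreateInfo{}
  //       .setImage( image )
  //       .setViewType( VULKAN_HPP_NAMESPACE::ImageViewType::e2D )
  //       .setFormat( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm )
  //       .setSubresourceRange( { VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
  //   VULKAN_HPP_NAMESPACE::ImageView imageView = device.createImageView( viewCreateInfo );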

  struct IndirectCommandsLayoutTokenNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ =
                                       VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup,
                                     uint32_t                                   stream_                       = {},
                                     uint32_t                                   offset_                       = {},
                                     uint32_t                                   vertexBindingUnit_            = {},
                                     VULKAN_HPP_NAMESPACE::Bool32               vertexDynamicStride_          = {},
                                     VULKAN_HPP_NAMESPACE::PipelineLayout       pushconstantPipelineLayout_   = {},
                                     VULKAN_HPP_NAMESPACE::ShaderStageFlags     pushconstantShaderStageFlags_ = {},
                                     uint32_t                                   pushconstantOffset_           = {},
                                     uint32_t                                   pushconstantSize_             = {},
                                     VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_           = {},
                                     uint32_t                                   indexTypeCount_               = {},
                                     const VULKAN_HPP_NAMESPACE::IndexType *    pIndexTypes_                  = {},
                                     const uint32_t * pIndexTypeValues_ = {} ) VULKAN_HPP_NOEXCEPT
      : tokenType( tokenType_ )
      , stream( stream_ )
      , offset( offset_ )
      , vertexBindingUnit( vertexBindingUnit_ )
      , vertexDynamicStride( vertexDynamicStride_ )
      , pushconstantPipelineLayout( pushconstantPipelineLayout_ )
      , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ )
      , pushconstantOffset( pushconstantOffset_ )
      , pushconstantSize( pushconstantSize_ )
      , indirectStateFlags( indirectStateFlags_ )
      , indexTypeCount( indexTypeCount_ )
      , pIndexTypes( pIndexTypes_ )
      , pIndexTypeValues( pIndexTypeValues_ )
    {}

    VULKAN_HPP_CONSTEXPR
      IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsLayoutTokenNV( *reinterpret_cast<IndirectCommandsLayoutTokenNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectCommandsLayoutTokenNV(
      VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_,
      uint32_t                                          stream_,
      uint32_t                                          offset_,
      uint32_t                                          vertexBindingUnit_,
      VULKAN_HPP_NAMESPACE::Bool32                      vertexDynamicStride_,
      VULKAN_HPP_NAMESPACE::PipelineLayout              pushconstantPipelineLayout_,
      VULKAN_HPP_NAMESPACE::ShaderStageFlags            pushconstantShaderStageFlags_,
      uint32_t                                          pushconstantOffset_,
      uint32_t                                          pushconstantSize_,
      VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV        indirectStateFlags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ = {} )
      : tokenType( tokenType_ )
      , stream( stream_ )
      , offset( offset_ )
      , vertexBindingUnit( vertexBindingUnit_ )
      , vertexDynamicStride( vertexDynamicStride_ )
      , pushconstantPipelineLayout( pushconstantPipelineLayout_ )
      , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ )
      , pushconstantOffset( pushconstantOffset_ )
      , pushconstantSize( pushconstantSize_ )
      , indirectStateFlags( indirectStateFlags_ )
      , indexTypeCount( static_cast<uint32_t>( indexTypes_.size() ) )
      , pIndexTypes( indexTypes_.data() )
      , pIndexTypeValues( indexTypeValues_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() );
#    else
      if ( indexTypes_.size() != indexTypeValues_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV &
                            operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const *>( &rhs );
      return *this;
    }

    IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV &
      setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenType = tokenType_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT
    {
      stream = stream_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingUnit = vertexBindingUnit_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV &
      setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexDynamicStride = vertexDynamicStride_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout(
      VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      pushconstantPipelineLayout = pushconstantPipelineLayout_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags(
      VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      pushconstantShaderStageFlags = pushconstantShaderStageFlags_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      pushconstantOffset = pushconstantOffset_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
    {
      pushconstantSize = pushconstantSize_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV &
      setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectStateFlags = indirectStateFlags_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      indexTypeCount = indexTypeCount_;
      return *this;
    }

    IndirectCommandsLayoutTokenNV &
      setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      pIndexTypes = pIndexTypes_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectCommandsLayoutTokenNV & setIndexTypes(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_ )
      VULKAN_HPP_NOEXCEPT
    {
      indexTypeCount = static_cast<uint32_t>( indexTypes_.size() );
      pIndexTypes    = indexTypes_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pIndexTypeValues = pIndexTypeValues_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectCommandsLayoutTokenNV & setIndexTypeValues(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
    {
      indexTypeCount   = static_cast<uint32_t>( indexTypeValues_.size() );
      pIndexTypeValues = indexTypeValues_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNV *>( this );
    }

    operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsLayoutTokenNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default;
#else
    bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) &&
             ( stream == rhs.stream ) && ( offset == rhs.offset ) && ( vertexBindingUnit == rhs.vertexBindingUnit ) &&
             ( vertexDynamicStride == rhs.vertexDynamicStride ) &&
             ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) &&
             ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) &&
             ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) &&
             ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) &&
             ( pIndexTypes == rhs.pIndexTypes ) && ( pIndexTypeValues == rhs.pIndexTypeValues );
    }

    bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::eIndirectCommandsLayoutTokenNV;
    const void *                                      pNext = {};
    VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType =
      VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
    uint32_t                                   stream                       = {};
    uint32_t                                   offset                       = {};
    uint32_t                                   vertexBindingUnit            = {};
    VULKAN_HPP_NAMESPACE::Bool32               vertexDynamicStride          = {};
    VULKAN_HPP_NAMESPACE::PipelineLayout       pushconstantPipelineLayout   = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags     pushconstantShaderStageFlags = {};
    uint32_t                                   pushconstantOffset           = {};
    uint32_t                                   pushconstantSize             = {};
    VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags           = {};
    uint32_t                                   indexTypeCount               = {};
    const VULKAN_HPP_NAMESPACE::IndexType *    pIndexTypes                  = {};
    const uint32_t *                           pIndexTypeValues             = {};
  };
  static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<IndirectCommandsLayoutTokenNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenNV>
  {
    using Type = IndirectCommandsLayoutTokenNV;
  };
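
  // Illustrative usage sketch (editorial comment, not generated from the registry): a minimal
  // eDraw token for VK_NV_device_generated_commands; stream and offset are example values. Note
  // that setIndexTypes() and setIndexTypeValues() both write indexTypeCount, and the enhanced-mode
  // constructor above enforces that those two arrays have the same length:
  //
  //   VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV drawToken =
  //     VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV{}
  //       .setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eDraw )
  //       .setStream( 0 )
  //       .setOffset( 0 );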

  struct IndirectCommandsLayoutCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eIndirectCommandsLayoutCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {},
      VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
      uint32_t                                tokenCount_        = {},
      const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_        = {},
      uint32_t                                                    streamCount_    = {},
      const uint32_t *                                            pStreamStrides_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , tokenCount( tokenCount_ )
      , pTokens( pTokens_ )
      , streamCount( streamCount_ )
      , pStreamStrides( pStreamStrides_ )
    {}

    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast<IndirectCommandsLayoutCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectCommandsLayoutCreateInfoNV(
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_,
      VULKAN_HPP_NAMESPACE::PipelineBindPoint                  pipelineBindPoint_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const &
                                                                            tokens_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {} )
      : flags( flags_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , tokenCount( static_cast<uint32_t>( tokens_.size() ) )
      , pTokens( tokens_.data() )
      , streamCount( static_cast<uint32_t>( streamStrides_.size() ) )
      , pStreamStrides( streamStrides_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV &
                            operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    IndirectCommandsLayoutCreateInfoNV &
      operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
      return *this;
    }

    IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    IndirectCommandsLayoutCreateInfoNV &
      setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    IndirectCommandsLayoutCreateInfoNV &
      setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenCount = tokenCount_;
      return *this;
    }

    IndirectCommandsLayoutCreateInfoNV &
      setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT
    {
      pTokens = pTokens_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectCommandsLayoutCreateInfoNV & setTokens(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const &
        tokens_ ) VULKAN_HPP_NOEXCEPT
    {
      tokenCount = static_cast<uint32_t>( tokens_.size() );
      pTokens    = tokens_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount = streamCount_;
      return *this;
    }

    IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
    {
      pStreamStrides = pStreamStrides_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    IndirectCommandsLayoutCreateInfoNV & setStreamStrides(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
    {
      streamCount    = static_cast<uint32_t>( streamStrides_.size() );
      pStreamStrides = streamStrides_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( this );
    }

    operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default;
#else
    bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( tokenCount == rhs.tokenCount ) &&
             ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) &&
             ( pStreamStrides == rhs.pStreamStrides );
    }

    bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                      sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
    const void *                                             pNext = {};
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint      = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
    uint32_t                                tokenCount             = {};
    const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens        = {};
    uint32_t                                                    streamCount    = {};
    const uint32_t *                                            pStreamStrides = {};
  };
  static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<IndirectCommandsLayoutCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
  {
    using Type = IndirectCommandsLayoutCreateInfoNV;
  };
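
  // Usage sketch (comment only, not generated from the registry; vk:: is the default
  // VULKAN_HPP_NAMESPACE): with enhanced mode enabled, the ArrayProxyNoTemporaries constructor above
  // derives tokenCount/pTokens and streamCount/pStreamStrides from containers directly. The token
  // contents and the 16-byte stride below are placeholders.
  //
  //   std::array<vk::IndirectCommandsLayoutTokenNV, 2> tokens  = { /* draw / push constant tokens */ };
  //   std::array<uint32_t, 1>                          strides = { 16 };
  //   vk::IndirectCommandsLayoutCreateInfoNV           layoutInfo(
  //     {}, vk::PipelineBindPoint::eGraphics, tokens, strides );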

  struct PipelineCacheCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_           = {},
                                                  size_t                                         initialDataSize_ = {},
                                                  const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , initialDataSize( initialDataSize_ )
      , pInitialData( pInitialData_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCacheCreateInfo( *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags                 flags_,
                             VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
      : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo &
                            operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      initialDataSize = initialDataSize_;
      return *this;
    }

    PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
    {
      pInitialData = pInitialData_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    PipelineCacheCreateInfo &
      setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
    {
      initialDataSize = initialData_.size() * sizeof( T );
      pInitialData    = initialData_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCacheCreateInfo *>( this );
    }

    operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCacheCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCacheCreateInfo const & ) const = default;
#else
    bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData );
    }

    bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType           = StructureType::ePipelineCacheCreateInfo;
    const void *                                   pNext           = {};
    VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags           = {};
    size_t                                         initialDataSize = {};
    const void *                                   pInitialData    = {};
  };
  static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
  {
    using Type = PipelineCacheCreateInfo;
  };
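
  // Usage sketch (comment only, not generated from the registry): seeding a pipeline cache with
  // previously serialized bytes via the templated ArrayProxyNoTemporaries constructor, which sets
  // initialDataSize to size() * sizeof( T ). cacheBlob and device are placeholders created elsewhere.
  //
  //   std::vector<uint8_t>        cacheBlob = /* bytes from vkGetPipelineCacheData, read from disk */;
  //   vk::PipelineCacheCreateInfo cacheInfo( {}, cacheBlob );
  //   vk::PipelineCache           cache = device.createPipelineCache( cacheInfo );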

  struct PushConstantRange
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
                                            uint32_t                               offset_     = {},
                                            uint32_t                               size_ = {} ) VULKAN_HPP_NOEXCEPT
      : stageFlags( stageFlags_ )
      , offset( offset_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
      : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PushConstantRange &
                            operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
      return *this;
    }

    PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      stageFlags = stageFlags_;
      return *this;
    }

    PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPushConstantRange *>( this );
    }

    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPushConstantRange *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PushConstantRange const & ) const = default;
#else
    bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size );
    }

    bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
    uint32_t                               offset     = {};
    uint32_t                               size       = {};
  };
  static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!" );
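
  // Usage sketch (comment only, not generated from the registry): a 64-byte push constant block
  // visible to the vertex stage, written via the constructor or the equivalent fluent setters.
  //
  //   vk::PushConstantRange range( vk::ShaderStageFlagBits::eVertex, 0, 64 );
  //   auto                  range2 = vk::PushConstantRange{}
  //                                    .setStageFlags( vk::ShaderStageFlagBits::eVertex )
  //                                    .setOffset( 0 )
  //                                    .setSize( 64 );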

  struct PipelineLayoutCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags   flags_                  = {},
      uint32_t                                          setLayoutCount_         = {},
      const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_            = {},
      uint32_t                                          pushConstantRangeCount_ = {},
      const VULKAN_HPP_NAMESPACE::PushConstantRange *   pPushConstantRanges_    = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , setLayoutCount( setLayoutCount_ )
      , pSetLayouts( pSetLayouts_ )
      , pushConstantRangeCount( pushConstantRangeCount_ )
      , pPushConstantRanges( pPushConstantRanges_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineLayoutCreateInfo( *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLayoutCreateInfo(
      VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &
        setLayouts_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const &
        pushConstantRanges_ = {} )
      : flags( flags_ )
      , setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) )
      , pSetLayouts( setLayouts_.data() )
      , pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) )
      , pPushConstantRanges( pushConstantRanges_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo &
                            operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = setLayoutCount_;
      return *this;
    }

    PipelineLayoutCreateInfo &
      setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pSetLayouts = pSetLayouts_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLayoutCreateInfo & setSetLayouts(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const &
        setLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
      pSetLayouts    = setLayouts_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = pushConstantRangeCount_;
      return *this;
    }

    PipelineLayoutCreateInfo &
      setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pPushConstantRanges = pPushConstantRanges_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLayoutCreateInfo & setPushConstantRanges(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const &
        pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
    {
      pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
      pPushConstantRanges    = pushConstantRanges_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineLayoutCreateInfo *>( this );
    }

    operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineLayoutCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineLayoutCreateInfo const & ) const = default;
#else
    bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ) &&
             ( pushConstantRangeCount == rhs.pushConstantRangeCount ) &&
             ( pPushConstantRanges == rhs.pPushConstantRanges );
    }

    bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType                  = StructureType::ePipelineLayoutCreateInfo;
    const void *                                      pNext                  = {};
    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags   flags                  = {};
    uint32_t                                          setLayoutCount         = {};
    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts            = {};
    uint32_t                                          pushConstantRangeCount = {};
    const VULKAN_HPP_NAMESPACE::PushConstantRange *   pPushConstantRanges    = {};
  };
  static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
  {
    using Type = PipelineLayoutCreateInfo;
  };
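
  // Usage sketch (comment only, not generated from the registry): combining one descriptor set layout
  // with one push constant range through the enhanced-mode constructor; setLayout, range and device
  // are placeholders created elsewhere.
  //
  //   vk::DescriptorSetLayout      setLayout = /* from device.createDescriptorSetLayout( ... ) */;
  //   vk::PushConstantRange        range( vk::ShaderStageFlagBits::eFragment, 0, 16 );
  //   vk::PipelineLayoutCreateInfo layoutInfo( {}, setLayout, range );
  //   vk::PipelineLayout           pipelineLayout = device.createPipelineLayout( layoutInfo );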

  struct PrivateDataSlotCreateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ = {} )
      VULKAN_HPP_NOEXCEPT : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PrivateDataSlotCreateInfoEXT( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PrivateDataSlotCreateInfoEXT( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PrivateDataSlotCreateInfoEXT( *reinterpret_cast<PrivateDataSlotCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfoEXT &
                            operator=( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PrivateDataSlotCreateInfoEXT & operator=( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PrivateDataSlotCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PrivateDataSlotCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkPrivateDataSlotCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( this );
    }

    operator VkPrivateDataSlotCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPrivateDataSlotCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PrivateDataSlotCreateInfoEXT const & ) const = default;
#else
    bool operator==( PrivateDataSlotCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
    }

    bool operator!=( PrivateDataSlotCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType = StructureType::ePrivateDataSlotCreateInfoEXT;
    const void *                                        pNext = {};
    VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags = {};
  };
  static_assert( sizeof( PrivateDataSlotCreateInfoEXT ) == sizeof( VkPrivateDataSlotCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PrivateDataSlotCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfoEXT>
  {
    using Type = PrivateDataSlotCreateInfoEXT;
  };

  class PrivateDataSlotEXT
  {
  public:
    using CType = VkPrivateDataSlotEXT;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

  public:
    VULKAN_HPP_CONSTEXPR         PrivateDataSlotEXT() = default;
    VULKAN_HPP_CONSTEXPR         PrivateDataSlotEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlotEXT( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT
      : m_privateDataSlotEXT( privateDataSlotEXT )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    PrivateDataSlotEXT & operator=( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT
    {
      m_privateDataSlotEXT = privateDataSlotEXT;
      return *this;
    }
#endif

    PrivateDataSlotEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_privateDataSlotEXT = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PrivateDataSlotEXT const & ) const = default;
#else
    bool operator==( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_privateDataSlotEXT == rhs.m_privateDataSlotEXT;
    }

    bool operator!=( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_privateDataSlotEXT != rhs.m_privateDataSlotEXT;
    }

    bool operator<( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_privateDataSlotEXT < rhs.m_privateDataSlotEXT;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlotEXT() const VULKAN_HPP_NOEXCEPT
    {
      return m_privateDataSlotEXT;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_privateDataSlotEXT != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_privateDataSlotEXT == VK_NULL_HANDLE;
    }

  private:
    VkPrivateDataSlotEXT m_privateDataSlotEXT = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT ) == sizeof( VkPrivateDataSlotEXT ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePrivateDataSlotEXT>
  {
    using type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT>
  {
    using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
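
  // Usage sketch (comment only, not generated from the registry): PrivateDataSlotEXT behaves like the
  // other non-dispatchable handle wrappers above -- default construction and nullptr assignment yield
  // a null handle, and the explicit bool conversion tests against VK_NULL_HANDLE.
  //
  //   vk::PrivateDataSlotEXT slot;    // null handle
  //   assert( !slot );                // operator! compares against VK_NULL_HANDLE
  //   slot = nullptr;                 // resets to a null handle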

  struct QueryPoolCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(
      VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags        flags_      = {},
      VULKAN_HPP_NAMESPACE::QueryType                   queryType_  = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion,
      uint32_t                                          queryCount_ = {},
      VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , queryType( queryType_ )
      , queryCount( queryCount_ )
      , pipelineStatistics( pipelineStatistics_ )
    {}

    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueryPoolCreateInfo( *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo &
                            operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>( &rhs );
      return *this;
    }

    QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
    {
      queryType = queryType_;
      return *this;
    }

    QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queryCount = queryCount_;
      return *this;
    }

    QueryPoolCreateInfo &
      setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStatistics = pipelineStatistics_;
      return *this;
    }

    operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueryPoolCreateInfo *>( this );
    }

    operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueryPoolCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueryPoolCreateInfo const & ) const = default;
#else
    bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) &&
             ( pipelineStatistics == rhs.pipelineStatistics );
    }

    bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType              = StructureType::eQueryPoolCreateInfo;
    const void *                                      pNext              = {};
    VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags        flags              = {};
    VULKAN_HPP_NAMESPACE::QueryType                   queryType          = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
    uint32_t                                          queryCount         = {};
    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
  };
  static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
  {
    using Type = QueryPoolCreateInfo;
  };
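
  // Usage sketch (comment only, not generated from the registry): a pool of 128 occlusion queries;
  // pipelineStatistics is left empty because it is only consulted for QueryType::ePipelineStatistics
  // pools. device is a placeholder created elsewhere.
  //
  //   vk::QueryPoolCreateInfo queryPoolInfo( {}, vk::QueryType::eOcclusion, 128 );
  //   vk::QueryPool           queryPool = device.createQueryPool( queryPoolInfo );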

  struct RayTracingShaderGroupCreateInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eRayTracingShaderGroupCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      RayTracingShaderGroupCreateInfoKHR( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ =
                                            VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral,
                                          uint32_t     generalShader_                   = {},
                                          uint32_t     closestHitShader_                = {},
                                          uint32_t     anyHitShader_                    = {},
                                          uint32_t     intersectionShader_              = {},
                                          const void * pShaderGroupCaptureReplayHandle_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , generalShader( generalShader_ )
      , closestHitShader( closestHitShader_ )
      , anyHitShader( anyHitShader_ )
      , intersectionShader( intersectionShader_ )
      , pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ )
    {}

    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast<RayTracingShaderGroupCreateInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR &
                            operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingShaderGroupCreateInfoKHR &
      operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const *>( &rhs );
      return *this;
    }

    RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoKHR &
      setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
    {
      generalShader = generalShader_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
    {
      closestHitShader = closestHitShader_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
    {
      anyHitShader = anyHitShader_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
    {
      intersectionShader = intersectionShader_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoKHR &
      setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_;
      return *this;
    }

    operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR *>( this );
    }

    operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default;
#else
    bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) &&
             ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) &&
             ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) &&
             ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
    }

    bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
    const void *                                       pNext = {};
    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type =
      VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
    uint32_t     generalShader                   = {};
    uint32_t     closestHitShader                = {};
    uint32_t     anyHitShader                    = {};
    uint32_t     intersectionShader              = {};
    const void * pShaderGroupCaptureReplayHandle = {};
  };
  static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
  {
    using Type = RayTracingShaderGroupCreateInfoKHR;
  };
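
  // Usage sketch (comment only, not generated from the registry): a raygen group referencing shader
  // stage 0 and a triangles hit group referencing a closest-hit stage at index 1; unused slots carry
  // VK_SHADER_UNUSED_KHR.
  //
  //   vk::RayTracingShaderGroupCreateInfoKHR raygenGroup( vk::RayTracingShaderGroupTypeKHR::eGeneral,
  //                                                       0,
  //                                                       VK_SHADER_UNUSED_KHR,
  //                                                       VK_SHADER_UNUSED_KHR,
  //                                                       VK_SHADER_UNUSED_KHR );
  //   vk::RayTracingShaderGroupCreateInfoKHR hitGroup( vk::RayTracingShaderGroupTypeKHR::eTrianglesHitGroup,
  //                                                    VK_SHADER_UNUSED_KHR,
  //                                                    1,
  //                                                    VK_SHADER_UNUSED_KHR,
  //                                                    VK_SHADER_UNUSED_KHR );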

  struct PipelineLibraryCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PipelineLibraryCreateInfoKHR( uint32_t                               libraryCount_ = {},
                                    const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_   = {} ) VULKAN_HPP_NOEXCEPT
      : libraryCount( libraryCount_ )
      , pLibraries( pLibraries_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineLibraryCreateInfoKHR( *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLibraryCreateInfoKHR(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ )
      : libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR &
                            operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const *>( &rhs );
      return *this;
    }

    PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      libraryCount = libraryCount_;
      return *this;
    }

    PipelineLibraryCreateInfoKHR &
      setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT
    {
      pLibraries = pLibraries_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineLibraryCreateInfoKHR & setLibraries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ )
      VULKAN_HPP_NOEXCEPT
    {
      libraryCount = static_cast<uint32_t>( libraries_.size() );
      pLibraries   = libraries_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR *>( this );
    }

    operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default;
#else
    bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) &&
             ( pLibraries == rhs.pLibraries );
    }

    bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType        = StructureType::ePipelineLibraryCreateInfoKHR;
    const void *                           pNext        = {};
    uint32_t                               libraryCount = {};
    const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries   = {};
  };
  static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineLibraryCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
  {
    using Type = PipelineLibraryCreateInfoKHR;
  };

  struct RayTracingPipelineInterfaceCreateInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_      = {},
                                                uint32_t maxPipelineRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ )
      , maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ )
    {}

    VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(
      RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : RayTracingPipelineInterfaceCreateInfoKHR(
          *reinterpret_cast<RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR &
                            operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingPipelineInterfaceCreateInfoKHR &
      operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

    RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RayTracingPipelineInterfaceCreateInfoKHR &
      setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_;
      return *this;
    }

    RayTracingPipelineInterfaceCreateInfoKHR &
      setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_;
      return *this;
    }

    operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR *>( this );
    }

    operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default;
#else
    bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) &&
             ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize );
    }

    bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
    const void *                        pNext = {};
    uint32_t                            maxPipelineRayPayloadSize      = {};
    uint32_t                            maxPipelineRayHitAttributeSize = {};
  };
  static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) ==
                   sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RayTracingPipelineInterfaceCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineInterfaceCreateInfoKHR>
  {
    using Type = RayTracingPipelineInterfaceCreateInfoKHR;
  };

  struct RayTracingPipelineCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(
      VULKAN_HPP_NAMESPACE::PipelineCreateFlags                              flags_                        = {},
      uint32_t                                                               stageCount_                   = {},
      const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *            pStages_                      = {},
      uint32_t                                                               groupCount_                   = {},
      const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR *       pGroups_                      = {},
      uint32_t                                                               maxPipelineRayRecursionDepth_ = {},
      const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR *             pLibraryInfo_                 = {},
      const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_            = {},
      const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *           pDynamicState_                = {},
      VULKAN_HPP_NAMESPACE::PipelineLayout                                   layout_                       = {},
      VULKAN_HPP_NAMESPACE::Pipeline                                         basePipelineHandle_           = {},
      int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , stageCount( stageCount_ )
      , pStages( pStages_ )
      , groupCount( groupCount_ )
      , pGroups( pGroups_ )
      , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ )
      , pLibraryInfo( pLibraryInfo_ )
      , pLibraryInterface( pLibraryInterface_ )
      , pDynamicState( pDynamicState_ )
      , layout( layout_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingPipelineCreateInfoKHR( *reinterpret_cast<RayTracingPipelineCreateInfoKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RayTracingPipelineCreateInfoKHR(
      VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
        stages_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_                       = {},
      uint32_t                                                                  maxPipelineRayRecursionDepth_ = {},
      const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR *                pLibraryInfo_                 = {},
      const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR *    pLibraryInterface_            = {},
      const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *              pDynamicState_                = {},
      VULKAN_HPP_NAMESPACE::PipelineLayout                                      layout_                       = {},
      VULKAN_HPP_NAMESPACE::Pipeline                                            basePipelineHandle_           = {},
      int32_t                                                                   basePipelineIndex_            = {} )
      : flags( flags_ )
      , stageCount( static_cast<uint32_t>( stages_.size() ) )
      , pStages( stages_.data() )
      , groupCount( static_cast<uint32_t>( groups_.size() ) )
      , pGroups( groups_.data() )
      , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ )
      , pLibraryInfo( pLibraryInfo_ )
      , pLibraryInterface( pLibraryInterface_ )
      , pDynamicState( pDynamicState_ )
      , layout( layout_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR &
                            operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const *>( &rhs );
      return *this;
    }

    RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = stageCount_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR &
      setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pStages = pStages_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RayTracingPipelineCreateInfoKHR & setStages(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
        stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast<uint32_t>( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = groupCount_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR &
      setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT
    {
      pGroups = pGroups_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RayTracingPipelineCreateInfoKHR &
      setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
                 const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = static_cast<uint32_t>( groups_.size() );
      pGroups    = groups_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RayTracingPipelineCreateInfoKHR &
      setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR &
      setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pLibraryInfo = pLibraryInfo_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR & setPLibraryInterface(
      const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
    {
      pLibraryInterface = pLibraryInterface_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR & setPDynamicState(
      const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      pDynamicState = pDynamicState_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR &
      setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineHandle = basePipelineHandle_;
      return *this;
    }

    RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineIndex = basePipelineIndex_;
      return *this;
    }

    operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( this );
    }

    operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingPipelineCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default;
#else
    bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) &&
             ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) &&
             ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) &&
             ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) &&
             ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
    }

    bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                         sType = StructureType::eRayTracingPipelineCreateInfoKHR;
    const void *                                                pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCreateFlags                   flags = {};
    uint32_t                                                    stageCount                              = {};
    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages                                 = {};
    uint32_t                                                    groupCount                              = {};
    const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR *       pGroups                      = {};
    uint32_t                                                               maxPipelineRayRecursionDepth = {};
    const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR *             pLibraryInfo                 = {};
    const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface            = {};
    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo *           pDynamicState                = {};
    VULKAN_HPP_NAMESPACE::PipelineLayout                                   layout                       = {};
    VULKAN_HPP_NAMESPACE::Pipeline                                         basePipelineHandle           = {};
    int32_t                                                                basePipelineIndex            = {};
  };
  static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoKHR>
  {
    using Type = RayTracingPipelineCreateInfoKHR;
  };
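
  // Usage sketch (comment only, not generated from the registry): assembling the create info from
  // stage and group containers with the enhanced-mode constructor; stages, groups and pipelineLayout
  // are placeholders set up elsewhere, and a maximum ray recursion depth of 1 is assumed.
  //
  //   std::vector<vk::PipelineShaderStageCreateInfo>       stages = { /* raygen, miss, closest-hit */ };
  //   std::vector<vk::RayTracingShaderGroupCreateInfoKHR>  groups = { /* one group per stage */ };
  //   vk::RayTracingPipelineCreateInfoKHR                  pipelineInfo(
  //     {}, stages, groups, 1, nullptr, nullptr, nullptr, pipelineLayout );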

  struct RayTracingShaderGroupCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eRayTracingShaderGroupCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ =
                                           VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral,
                                         uint32_t generalShader_      = {},
                                         uint32_t closestHitShader_   = {},
                                         uint32_t anyHitShader_       = {},
                                         uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , generalShader( generalShader_ )
      , closestHitShader( closestHitShader_ )
      , anyHitShader( anyHitShader_ )
      , intersectionShader( intersectionShader_ )
    {}

    VULKAN_HPP_CONSTEXPR
      RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast<RayTracingShaderGroupCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV &
                            operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>( &rhs );
      return *this;
    }

    RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoNV &
      setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
    {
      generalShader = generalShader_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
    {
      closestHitShader = closestHitShader_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
    {
      anyHitShader = anyHitShader_;
      return *this;
    }

    RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
    {
      intersectionShader = intersectionShader_;
      return *this;
    }

    operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV *>( this );
    }

    operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default;
#else
    bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) &&
             ( generalShader == rhs.generalShader ) && ( closestHitShader == rhs.closestHitShader ) &&
             ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader );
    }

    bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
    const void *                                       pNext = {};
    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type =
      VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
    uint32_t generalShader      = {};
    uint32_t closestHitShader   = {};
    uint32_t anyHitShader       = {};
    uint32_t intersectionShader = {};
  };
  static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoNV>
  {
    using Type = RayTracingShaderGroupCreateInfoNV;
  };
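
  // Editorial usage sketch, not generated from the registry: a "general" NV shader group references a
  // raygen, miss or callable stage through generalShader and marks the remaining indices unused.
  // The stage index 0 is a placeholder; VK_SHADER_UNUSED_NV is provided by vulkan_core.h.
  //
  //   auto raygenGroup = vk::RayTracingShaderGroupCreateInfoNV{}
  //                        .setType( vk::RayTracingShaderGroupTypeKHR::eGeneral )
  //                        .setGeneralShader( 0 )
  //                        .setClosestHitShader( VK_SHADER_UNUSED_NV )
  //                        .setAnyHitShader( VK_SHADER_UNUSED_NV )
  //                        .setIntersectionShader( VK_SHADER_UNUSED_NV );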

  struct RayTracingPipelineCreateInfoNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags                       flags_      = {},
                                      uint32_t                                                        stageCount_ = {},
                                      const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *     pStages_    = {},
                                      uint32_t                                                        groupCount_ = {},
                                      const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_    = {},
                                      uint32_t                             maxRecursionDepth_                     = {},
                                      VULKAN_HPP_NAMESPACE::PipelineLayout layout_                                = {},
                                      VULKAN_HPP_NAMESPACE::Pipeline       basePipelineHandle_                    = {},
                                      int32_t                              basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , stageCount( stageCount_ )
      , pStages( pStages_ )
      , groupCount( groupCount_ )
      , pGroups( pGroups_ )
      , maxRecursionDepth( maxRecursionDepth_ )
      , layout( layout_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : RayTracingPipelineCreateInfoNV( *reinterpret_cast<RayTracingPipelineCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RayTracingPipelineCreateInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
        stages_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_             = {},
      uint32_t                                                                 maxRecursionDepth_  = {},
      VULKAN_HPP_NAMESPACE::PipelineLayout                                     layout_             = {},
      VULKAN_HPP_NAMESPACE::Pipeline                                           basePipelineHandle_ = {},
      int32_t                                                                  basePipelineIndex_  = {} )
      : flags( flags_ )
      , stageCount( static_cast<uint32_t>( stages_.size() ) )
      , pStages( stages_.data() )
      , groupCount( static_cast<uint32_t>( groups_.size() ) )
      , pGroups( groups_.data() )
      , maxRecursionDepth( maxRecursionDepth_ )
      , layout( layout_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV &
                            operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>( &rhs );
      return *this;
    }

    RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = stageCount_;
      return *this;
    }

    RayTracingPipelineCreateInfoNV &
      setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pStages = pStages_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RayTracingPipelineCreateInfoNV & setStages(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
        stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast<uint32_t>( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = groupCount_;
      return *this;
    }

    RayTracingPipelineCreateInfoNV &
      setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
    {
      pGroups = pGroups_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RayTracingPipelineCreateInfoNV &
      setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
                 const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = static_cast<uint32_t>( groups_.size() );
      pGroups    = groups_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      maxRecursionDepth = maxRecursionDepth_;
      return *this;
    }

    RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
    {
      layout = layout_;
      return *this;
    }

    RayTracingPipelineCreateInfoNV &
      setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineHandle = basePipelineHandle_;
      return *this;
    }

    RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      basePipelineIndex = basePipelineIndex_;
      return *this;
    }

    operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( this );
    }

    operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default;
#else
    bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( groupCount == rhs.groupCount ) &&
             ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) &&
             ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
    }

    bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                         sType = StructureType::eRayTracingPipelineCreateInfoNV;
    const void *                                                pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCreateFlags                   flags = {};
    uint32_t                                                    stageCount             = {};
    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages                = {};
    uint32_t                                                    groupCount             = {};
    const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups            = {};
    uint32_t                                                        maxRecursionDepth  = {};
    VULKAN_HPP_NAMESPACE::PipelineLayout                            layout             = {};
    VULKAN_HPP_NAMESPACE::Pipeline                                  basePipelineHandle = {};
    int32_t                                                         basePipelineIndex  = {};
  };
  static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
  {
    using Type = RayTracingPipelineCreateInfoNV;
  };
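
  // Editorial usage sketch, not generated from the registry: in enhanced mode the array constructor
  // above derives stageCount/groupCount from the proxies, so only the containers need to be kept
  // alive. 'stages', 'groups' and 'pipelineLayout' are assumed, caller-provided objects.
  //
  //   // std::vector<vk::PipelineShaderStageCreateInfo>     stages;
  //   // std::vector<vk::RayTracingShaderGroupCreateInfoNV> groups;
  //   vk::RayTracingPipelineCreateInfoNV rtci( {}, stages, groups, /*maxRecursionDepth*/ 1, pipelineLayout );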

  struct SubpassDescription
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubpassDescription(
      VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_       = {},
      VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
      uint32_t                                inputAttachmentCount_              = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_       = {},
      uint32_t                                          colorAttachmentCount_    = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_       = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_     = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {},
      uint32_t                                          preserveAttachmentCount_ = {},
      const uint32_t *                                  pPreserveAttachments_    = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , inputAttachmentCount( inputAttachmentCount_ )
      , pInputAttachments( pInputAttachments_ )
      , colorAttachmentCount( colorAttachmentCount_ )
      , pColorAttachments( pColorAttachments_ )
      , pResolveAttachments( pResolveAttachments_ )
      , pDepthStencilAttachment( pDepthStencilAttachment_ )
      , preserveAttachmentCount( preserveAttachmentCount_ )
      , pPreserveAttachments( pPreserveAttachments_ )
    {}

    VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassDescription( *reinterpret_cast<SubpassDescription const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription(
      VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_,
      VULKAN_HPP_NAMESPACE::PipelineBindPoint       pipelineBindPoint_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
        inputAttachments_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
        colorAttachments_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
                                                                            resolveAttachments_      = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference *                     pDepthStencilAttachment_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_     = {} )
      : flags( flags_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) )
      , pInputAttachments( inputAttachments_.data() )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
      , pColorAttachments( colorAttachments_.data() )
      , pResolveAttachments( resolveAttachments_.data() )
      , pDepthStencilAttachment( pDepthStencilAttachment_ )
      , preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) )
      , pPreserveAttachments( preserveAttachments_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
#    else
      if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription &
                            operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>( &rhs );
      return *this;
    }

    SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    SubpassDescription &
      setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentCount = inputAttachmentCount_;
      return *this;
    }

    SubpassDescription &
      setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pInputAttachments = pInputAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription & setInputAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
        inputAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
      pInputAttachments    = inputAttachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    SubpassDescription &
      setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachments = pColorAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription & setColorAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
        colorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
      pColorAttachments    = colorAttachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubpassDescription & setPResolveAttachments(
      const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pResolveAttachments = pResolveAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription & setResolveAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const &
        resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
      pResolveAttachments  = resolveAttachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubpassDescription & setPDepthStencilAttachment(
      const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthStencilAttachment = pDepthStencilAttachment_;
      return *this;
    }

    SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      preserveAttachmentCount = preserveAttachmentCount_;
      return *this;
    }

    SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pPreserveAttachments = pPreserveAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription & setPreserveAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
      pPreserveAttachments    = preserveAttachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDescription *>( this );
    }

    operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDescription *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDescription const & ) const = default;
#else
    bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
             ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) &&
             ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) &&
             ( pResolveAttachments == rhs.pResolveAttachments ) &&
             ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) &&
             ( preserveAttachmentCount == rhs.preserveAttachmentCount ) &&
             ( pPreserveAttachments == rhs.pPreserveAttachments );
    }

    bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags          = {};
    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint    = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
    uint32_t                                inputAttachmentCount = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments       = {};
    uint32_t                                          colorAttachmentCount    = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments       = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments     = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {};
    uint32_t                                          preserveAttachmentCount = {};
    const uint32_t *                                  pPreserveAttachments    = {};
  };
  static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubpassDescription>::value, "struct wrapper is not a standard layout!" );
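
  // Editorial usage sketch, not generated from the registry: a single-color-attachment graphics subpass.
  // Note that the enhanced constructor above asserts (or throws) when resolveAttachments_ is non-empty
  // and differs in size from colorAttachments_.
  //
  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::SubpassDescription  subpass( {}, vk::PipelineBindPoint::eGraphics,
  //                                    /*inputAttachments*/ {}, /*colorAttachments*/ colorRef );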

  struct SubpassDependency
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SubpassDependency( uint32_t                                 srcSubpass_      = {},
                         uint32_t                                 dstSubpass_      = {},
                         VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_    = {},
                         VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_    = {},
                         VULKAN_HPP_NAMESPACE::AccessFlags        srcAccessMask_   = {},
                         VULKAN_HPP_NAMESPACE::AccessFlags        dstAccessMask_   = {},
                         VULKAN_HPP_NAMESPACE::DependencyFlags    dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubpass( srcSubpass_ )
      , dstSubpass( dstSubpass_ )
      , srcStageMask( srcStageMask_ )
      , dstStageMask( dstStageMask_ )
      , srcAccessMask( srcAccessMask_ )
      , dstAccessMask( dstAccessMask_ )
      , dependencyFlags( dependencyFlags_ )
    {}

    VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency &
                            operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>( &rhs );
      return *this;
    }

    SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubpass = srcSubpass_;
      return *this;
    }

    SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubpass = dstSubpass_;
      return *this;
    }

    SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyFlags = dependencyFlags_;
      return *this;
    }

    operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDependency *>( this );
    }

    operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDependency *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDependency const & ) const = default;
#else
    bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) &&
             ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) &&
             ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) &&
             ( dependencyFlags == rhs.dependencyFlags );
    }

    bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                 srcSubpass      = {};
    uint32_t                                 dstSubpass      = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask    = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask    = {};
    VULKAN_HPP_NAMESPACE::AccessFlags        srcAccessMask   = {};
    VULKAN_HPP_NAMESPACE::AccessFlags        dstAccessMask   = {};
    VULKAN_HPP_NAMESPACE::DependencyFlags    dependencyFlags = {};
  };
  static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubpassDependency>::value, "struct wrapper is not a standard layout!" );
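
  // Editorial usage sketch, not generated from the registry: an external-to-first-subpass dependency
  // that orders color attachment writes. VK_SUBPASS_EXTERNAL comes from vulkan_core.h.
  //
  //   vk::SubpassDependency dependency( VK_SUBPASS_EXTERNAL, 0,
  //                                     vk::PipelineStageFlagBits::eColorAttachmentOutput,
  //                                     vk::PipelineStageFlagBits::eColorAttachmentOutput,
  //                                     {}, vk::AccessFlagBits::eColorAttachmentWrite );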

  struct RenderPassCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags         flags_           = {},
                            uint32_t                                            attachmentCount_ = {},
                            const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_    = {},
                            uint32_t                                            subpassCount_    = {},
                            const VULKAN_HPP_NAMESPACE::SubpassDescription *    pSubpasses_      = {},
                            uint32_t                                            dependencyCount_ = {},
                            const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , attachmentCount( attachmentCount_ )
      , pAttachments( pAttachments_ )
      , subpassCount( subpassCount_ )
      , pSubpasses( pSubpasses_ )
      , dependencyCount( dependencyCount_ )
      , pDependencies( pDependencies_ )
    {}

    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassCreateInfo( *reinterpret_cast<RenderPassCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo(
      VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const &
        attachments_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const &
        subpasses_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const &
        dependencies_ = {} )
      : flags( flags_ )
      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
      , pSubpasses( subpasses_.data() )
      , dependencyCount( static_cast<uint32_t>( dependencies_.size() ) )
      , pDependencies( dependencies_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo &
                            operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>( &rhs );
      return *this;
    }

    RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    RenderPassCreateInfo &
      setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo & setAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const &
        attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = subpassCount_;
      return *this;
    }

    RenderPassCreateInfo &
      setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
    {
      pSubpasses = pSubpasses_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo & setSubpasses(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ )
      VULKAN_HPP_NOEXCEPT
    {
      subpassCount = static_cast<uint32_t>( subpasses_.size() );
      pSubpasses   = subpasses_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = dependencyCount_;
      return *this;
    }

    RenderPassCreateInfo &
      setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT
    {
      pDependencies = pDependencies_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo & setDependencies(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const &
        dependencies_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
      pDependencies   = dependencies_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassCreateInfo *>( this );
    }

    operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassCreateInfo const & ) const = default;
#else
    bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
             ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
             ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies );
    }

    bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType           = StructureType::eRenderPassCreateInfo;
    const void *                                        pNext           = {};
    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags         flags           = {};
    uint32_t                                            attachmentCount = {};
    const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments    = {};
    uint32_t                                            subpassCount    = {};
    const VULKAN_HPP_NAMESPACE::SubpassDescription *    pSubpasses      = {};
    uint32_t                                            dependencyCount = {};
    const VULKAN_HPP_NAMESPACE::SubpassDependency *     pDependencies   = {};
  };
  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
  {
    using Type = RenderPassCreateInfo;
  };
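
  // Editorial usage sketch, not generated from the registry: the enhanced constructor above derives the
  // three count/pointer pairs from the proxies. 'colorAttachment', 'subpass', 'dependency' and 'device'
  // are assumed, caller-provided objects; createRenderPassUnique additionally assumes that smart handles
  // and exceptions are enabled.
  //
  //   vk::RenderPassCreateInfo renderPassInfo( {}, colorAttachment, subpass, dependency );
  //   vk::UniqueRenderPass     renderPass = device.createRenderPassUnique( renderPassInfo );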

  struct SubpassDescription2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubpassDescription2(
      VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_       = {},
      VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
      uint32_t                                viewMask_          = {},
      uint32_t                                inputAttachmentCount_               = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_       = {},
      uint32_t                                           colorAttachmentCount_    = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_       = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_     = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {},
      uint32_t                                           preserveAttachmentCount_ = {},
      const uint32_t *                                   pPreserveAttachments_    = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , viewMask( viewMask_ )
      , inputAttachmentCount( inputAttachmentCount_ )
      , pInputAttachments( pInputAttachments_ )
      , colorAttachmentCount( colorAttachmentCount_ )
      , pColorAttachments( pColorAttachments_ )
      , pResolveAttachments( pResolveAttachments_ )
      , pDepthStencilAttachment( pDepthStencilAttachment_ )
      , preserveAttachmentCount( preserveAttachmentCount_ )
      , pPreserveAttachments( pPreserveAttachments_ )
    {}

    VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription2(
      VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_,
      VULKAN_HPP_NAMESPACE::PipelineBindPoint       pipelineBindPoint_,
      uint32_t                                      viewMask_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
        inputAttachments_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
        colorAttachments_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
                                                                            resolveAttachments_      = {},
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 *                    pDepthStencilAttachment_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_     = {} )
      : flags( flags_ )
      , pipelineBindPoint( pipelineBindPoint_ )
      , viewMask( viewMask_ )
      , inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) )
      , pInputAttachments( inputAttachments_.data() )
      , colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) )
      , pColorAttachments( colorAttachments_.data() )
      , pResolveAttachments( resolveAttachments_.data() )
      , pDepthStencilAttachment( pDepthStencilAttachment_ )
      , preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) )
      , pPreserveAttachments( preserveAttachments_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
#    else
      if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 &
                            operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>( &rhs );
      return *this;
    }

    SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    SubpassDescription2 &
      setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
    {
      viewMask = viewMask_;
      return *this;
    }

    SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentCount = inputAttachmentCount_;
      return *this;
    }

    SubpassDescription2 &
      setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pInputAttachments = pInputAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription2 & setInputAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
        inputAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
      pInputAttachments    = inputAttachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = colorAttachmentCount_;
      return *this;
    }

    SubpassDescription2 &
      setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorAttachments = pColorAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription2 & setColorAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
        colorAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
      pColorAttachments    = colorAttachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubpassDescription2 & setPResolveAttachments(
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pResolveAttachments = pResolveAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription2 & setResolveAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const &
        resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
      pResolveAttachments  = resolveAttachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubpassDescription2 & setPDepthStencilAttachment(
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthStencilAttachment = pDepthStencilAttachment_;
      return *this;
    }

    SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      preserveAttachmentCount = preserveAttachmentCount_;
      return *this;
    }

    SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pPreserveAttachments = pPreserveAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubpassDescription2 & setPreserveAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
      pPreserveAttachments    = preserveAttachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDescription2 *>( this );
    }

    operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDescription2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDescription2 const & ) const = default;
#else
    bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( viewMask == rhs.viewMask ) &&
             ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) &&
             ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) &&
             ( pResolveAttachments == rhs.pResolveAttachments ) &&
             ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) &&
             ( preserveAttachmentCount == rhs.preserveAttachmentCount ) &&
             ( pPreserveAttachments == rhs.pPreserveAttachments );
    }

    bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType           sType          = StructureType::eSubpassDescription2;
    const void *                                  pNext          = {};
    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags          = {};
    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint    = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
    uint32_t                                viewMask             = {};
    uint32_t                                inputAttachmentCount = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments       = {};
    uint32_t                                           colorAttachmentCount    = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments       = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments     = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {};
    uint32_t                                           preserveAttachmentCount = {};
    const uint32_t *                                   pPreserveAttachments    = {};
  };
  static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubpassDescription2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSubpassDescription2>
  {
    using Type = SubpassDescription2;
  };
  using SubpassDescription2KHR = SubpassDescription2;
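
  // Editorial usage sketch, not generated from the registry: SubpassDescription2 adds sType/pNext and a
  // multiview viewMask to SubpassDescription; a viewMask of 0 leaves multiview disabled for the subpass.
  //
  //   vk::AttachmentReference2 colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::SubpassDescription2  subpass( {}, vk::PipelineBindPoint::eGraphics, /*viewMask*/ 0,
  //                                     /*inputAttachments*/ {}, /*colorAttachments*/ colorRef );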

  struct SubpassDependency2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t                                 srcSubpass_      = {},
                                             uint32_t                                 dstSubpass_      = {},
                                             VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_    = {},
                                             VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_    = {},
                                             VULKAN_HPP_NAMESPACE::AccessFlags        srcAccessMask_   = {},
                                             VULKAN_HPP_NAMESPACE::AccessFlags        dstAccessMask_   = {},
                                             VULKAN_HPP_NAMESPACE::DependencyFlags    dependencyFlags_ = {},
                                             int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcSubpass( srcSubpass_ )
      , dstSubpass( dstSubpass_ )
      , srcStageMask( srcStageMask_ )
      , dstStageMask( dstStageMask_ )
      , srcAccessMask( srcAccessMask_ )
      , dstAccessMask( dstAccessMask_ )
      , dependencyFlags( dependencyFlags_ )
      , viewOffset( viewOffset_ )
    {}

    VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 &
                            operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>( &rhs );
      return *this;
    }

    SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
    {
      srcSubpass = srcSubpass_;
      return *this;
    }

    SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
    {
      dstSubpass = dstSubpass_;
      return *this;
    }

    SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcStageMask = srcStageMask_;
      return *this;
    }

    SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstStageMask = dstStageMask_;
      return *this;
    }

    SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      srcAccessMask = srcAccessMask_;
      return *this;
    }

    SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
    {
      dstAccessMask = dstAccessMask_;
      return *this;
    }

    SubpassDependency2 &
      setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyFlags = dependencyFlags_;
      return *this;
    }

    SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      viewOffset = viewOffset_;
      return *this;
    }

    operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDependency2 *>( this );
    }

    operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDependency2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDependency2 const & ) const = default;
#else
    bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) &&
             ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) &&
             ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) &&
             ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) &&
             ( viewOffset == rhs.viewOffset );
    }

    bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType           = StructureType::eSubpassDependency2;
    const void *                             pNext           = {};
    uint32_t                                 srcSubpass      = {};
    uint32_t                                 dstSubpass      = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask    = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask    = {};
    VULKAN_HPP_NAMESPACE::AccessFlags        srcAccessMask   = {};
    VULKAN_HPP_NAMESPACE::AccessFlags        dstAccessMask   = {};
    VULKAN_HPP_NAMESPACE::DependencyFlags    dependencyFlags = {};
    int32_t                                  viewOffset      = {};
  };
  static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubpassDependency2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSubpassDependency2>
  {
    using Type = SubpassDependency2;
  };
  using SubpassDependency2KHR = SubpassDependency2;
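
  // Editorial usage sketch, not generated from the registry: the viewOffset field only matters for
  // multiview view-local dependencies; 0 is the usual value otherwise.
  //
  //   vk::SubpassDependency2 dependency( VK_SUBPASS_EXTERNAL, 0,
  //                                      vk::PipelineStageFlagBits::eColorAttachmentOutput,
  //                                      vk::PipelineStageFlagBits::eColorAttachmentOutput,
  //                                      {}, vk::AccessFlagBits::eColorAttachmentWrite,
  //                                      {}, /*viewOffset*/ 0 );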

  struct RenderPassCreateInfo2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_                 = {},
                                                uint32_t                                    attachmentCount_       = {},
                                                const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {},
                                                uint32_t                                             subpassCount_ = {},
                                                const VULKAN_HPP_NAMESPACE::SubpassDescription2 *    pSubpasses_   = {},
                                                uint32_t                                         dependencyCount_  = {},
                                                const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_    = {},
                                                uint32_t         correlatedViewMaskCount_                          = {},
                                                const uint32_t * pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , attachmentCount( attachmentCount_ )
      , pAttachments( pAttachments_ )
      , subpassCount( subpassCount_ )
      , pSubpasses( pSubpasses_ )
      , dependencyCount( dependencyCount_ )
      , pDependencies( pDependencies_ )
      , correlatedViewMaskCount( correlatedViewMaskCount_ )
      , pCorrelatedViewMasks( pCorrelatedViewMasks_ )
    {}

    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassCreateInfo2( *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo2(
      VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const &
        attachments_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const &
        subpasses_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const &
                                                                            dependencies_        = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {} )
      : flags( flags_ )
      , attachmentCount( static_cast<uint32_t>( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , subpassCount( static_cast<uint32_t>( subpasses_.size() ) )
      , pSubpasses( subpasses_.data() )
      , dependencyCount( static_cast<uint32_t>( dependencies_.size() ) )
      , pDependencies( dependencies_.data() )
      , correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) )
      , pCorrelatedViewMasks( correlatedViewMasks_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 &
                            operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>( &rhs );
      return *this;
    }

    RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    RenderPassCreateInfo2 &
      setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo2 & setAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const &
        attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = subpassCount_;
      return *this;
    }

    RenderPassCreateInfo2 &
      setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT
    {
      pSubpasses = pSubpasses_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo2 & setSubpasses(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const &
        subpasses_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = static_cast<uint32_t>( subpasses_.size() );
      pSubpasses   = subpasses_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = dependencyCount_;
      return *this;
    }

    RenderPassCreateInfo2 &
      setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT
    {
      pDependencies = pDependencies_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo2 & setDependencies(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const &
        dependencies_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = static_cast<uint32_t>( dependencies_.size() );
      pDependencies   = dependencies_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
    {
      correlatedViewMaskCount = correlatedViewMaskCount_;
      return *this;
    }

    RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pCorrelatedViewMasks = pCorrelatedViewMasks_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassCreateInfo2 & setCorrelatedViewMasks(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
      pCorrelatedViewMasks    = correlatedViewMasks_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassCreateInfo2 *>( this );
    }

    operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassCreateInfo2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassCreateInfo2 const & ) const = default;
#else
    bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
             ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) &&
             ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) &&
             ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) &&
             ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
    }

    bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType           = StructureType::eRenderPassCreateInfo2;
    const void *                                         pNext           = {};
    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags          flags           = {};
    uint32_t                                             attachmentCount = {};
    const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments    = {};
    uint32_t                                             subpassCount    = {};
    const VULKAN_HPP_NAMESPACE::SubpassDescription2 *    pSubpasses      = {};
    uint32_t                                             dependencyCount = {};
    const VULKAN_HPP_NAMESPACE::SubpassDependency2 *     pDependencies   = {};
    uint32_t                                             correlatedViewMaskCount = {};
    const uint32_t *                                     pCorrelatedViewMasks    = {};
  };
  static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
  {
    using Type = RenderPassCreateInfo2;
  };
  using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
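
  // Usage sketch (not part of the generated API): with the ArrayProxyNoTemporaries
  // constructor above, the count/pointer pairs are derived from the proxies. This assumes
  // the default namespace alias vk, enhanced mode, a vk::Device `device`, and hypothetical
  // containers `attachments`, `subpasses`, and `dependencies` holding
  // vk::AttachmentDescription2, vk::SubpassDescription2, and vk::SubpassDependency2
  // elements that were filled in elsewhere:
  //
  //   vk::RenderPassCreateInfo2 renderPassInfo( {}, attachments, subpasses, dependencies );
  //   vk::RenderPass            renderPass = device.createRenderPass2( renderPassInfo );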

  struct SamplerCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerCreateInfo(
      VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_            = {},
      VULKAN_HPP_NAMESPACE::Filter             magFilter_        = VULKAN_HPP_NAMESPACE::Filter::eNearest,
      VULKAN_HPP_NAMESPACE::Filter             minFilter_        = VULKAN_HPP_NAMESPACE::Filter::eNearest,
      VULKAN_HPP_NAMESPACE::SamplerMipmapMode  mipmapMode_       = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest,
      VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_     = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
      VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_     = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
      VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_     = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat,
      float                                    mipLodBias_       = {},
      VULKAN_HPP_NAMESPACE::Bool32             anisotropyEnable_ = {},
      float                                    maxAnisotropy_    = {},
      VULKAN_HPP_NAMESPACE::Bool32             compareEnable_    = {},
      VULKAN_HPP_NAMESPACE::CompareOp          compareOp_        = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
      float                                    minLod_           = {},
      float                                    maxLod_           = {},
      VULKAN_HPP_NAMESPACE::BorderColor        borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
      VULKAN_HPP_NAMESPACE::Bool32             unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , magFilter( magFilter_ )
      , minFilter( minFilter_ )
      , mipmapMode( mipmapMode_ )
      , addressModeU( addressModeU_ )
      , addressModeV( addressModeV_ )
      , addressModeW( addressModeW_ )
      , mipLodBias( mipLodBias_ )
      , anisotropyEnable( anisotropyEnable_ )
      , maxAnisotropy( maxAnisotropy_ )
      , compareEnable( compareEnable_ )
      , compareOp( compareOp_ )
      , minLod( minLod_ )
      , maxLod( maxLod_ )
      , borderColor( borderColor_ )
      , unnormalizedCoordinates( unnormalizedCoordinates_ )
    {}

    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo &
                            operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
      return *this;
    }

    SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
    {
      magFilter = magFilter_;
      return *this;
    }

    SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
    {
      minFilter = minFilter_;
      return *this;
    }

    SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
    {
      mipmapMode = mipmapMode_;
      return *this;
    }

    SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
    {
      addressModeU = addressModeU_;
      return *this;
    }

    SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
    {
      addressModeV = addressModeV_;
      return *this;
    }

    SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
    {
      addressModeW = addressModeW_;
      return *this;
    }

    SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
    {
      mipLodBias = mipLodBias_;
      return *this;
    }

    SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      anisotropyEnable = anisotropyEnable_;
      return *this;
    }

    SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
    {
      maxAnisotropy = maxAnisotropy_;
      return *this;
    }

    SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      compareEnable = compareEnable_;
      return *this;
    }

    SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
    {
      compareOp = compareOp_;
      return *this;
    }

    SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
    {
      minLod = minLod_;
      return *this;
    }

    SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
    {
      maxLod = maxLod_;
      return *this;
    }

    SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
    {
      borderColor = borderColor_;
      return *this;
    }

    SamplerCreateInfo &
      setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
    {
      unnormalizedCoordinates = unnormalizedCoordinates_;
      return *this;
    }

    operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerCreateInfo *>( this );
    }

    operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerCreateInfo const & ) const = default;
#else
    bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && ( mipmapMode == rhs.mipmapMode ) &&
             ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) &&
             ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) &&
             ( anisotropyEnable == rhs.anisotropyEnable ) && ( maxAnisotropy == rhs.maxAnisotropy ) &&
             ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) &&
             ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) &&
             ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
    }

    bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType            = StructureType::eSamplerCreateInfo;
    const void *                             pNext            = {};
    VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags            = {};
    VULKAN_HPP_NAMESPACE::Filter             magFilter        = VULKAN_HPP_NAMESPACE::Filter::eNearest;
    VULKAN_HPP_NAMESPACE::Filter             minFilter        = VULKAN_HPP_NAMESPACE::Filter::eNearest;
    VULKAN_HPP_NAMESPACE::SamplerMipmapMode  mipmapMode       = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU     = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV     = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW     = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
    float                                    mipLodBias       = {};
    VULKAN_HPP_NAMESPACE::Bool32             anisotropyEnable = {};
    float                                    maxAnisotropy    = {};
    VULKAN_HPP_NAMESPACE::Bool32             compareEnable    = {};
    VULKAN_HPP_NAMESPACE::CompareOp          compareOp        = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
    float                                    minLod           = {};
    float                                    maxLod           = {};
    VULKAN_HPP_NAMESPACE::BorderColor        borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
    VULKAN_HPP_NAMESPACE::Bool32             unnormalizedCoordinates = {};
  };
  static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerCreateInfo>
  {
    using Type = SamplerCreateInfo;
  };
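
  // Usage sketch (not part of the generated API): the setters return *this, so a
  // SamplerCreateInfo is typically filled by chaining. Assumes the default namespace
  // alias vk and a vk::Device `device` created elsewhere:
  //
  //   vk::SamplerCreateInfo samplerInfo;
  //   samplerInfo.setMagFilter( vk::Filter::eLinear )
  //              .setMinFilter( vk::Filter::eLinear )
  //              .setMipmapMode( vk::SamplerMipmapMode::eLinear )
  //              .setAddressModeU( vk::SamplerAddressMode::eClampToEdge )
  //              .setAddressModeV( vk::SamplerAddressMode::eClampToEdge )
  //              .setMaxLod( VK_LOD_CLAMP_NONE );
  //   vk::Sampler sampler = device.createSampler( samplerInfo );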

  struct SamplerYcbcrConversionCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(
      VULKAN_HPP_NAMESPACE::Format                      format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ =
        VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
      VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_    = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
      VULKAN_HPP_NAMESPACE::ComponentMapping  components_    = {},
      VULKAN_HPP_NAMESPACE::ChromaLocation    xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
      VULKAN_HPP_NAMESPACE::ChromaLocation    yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
      VULKAN_HPP_NAMESPACE::Filter            chromaFilter_  = VULKAN_HPP_NAMESPACE::Filter::eNearest,
      VULKAN_HPP_NAMESPACE::Bool32            forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT
      : format( format_ )
      , ycbcrModel( ycbcrModel_ )
      , ycbcrRange( ycbcrRange_ )
      , components( components_ )
      , xChromaOffset( xChromaOffset_ )
      , yChromaOffset( yChromaOffset_ )
      , chromaFilter( chromaFilter_ )
      , forceExplicitReconstruction( forceExplicitReconstruction_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo &
                            operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
      return *this;
    }

    SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    SamplerYcbcrConversionCreateInfo &
      setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrModel = ycbcrModel_;
      return *this;
    }

    SamplerYcbcrConversionCreateInfo &
      setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrRange = ycbcrRange_;
      return *this;
    }

    SamplerYcbcrConversionCreateInfo &
      setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
    {
      components = components_;
      return *this;
    }

    SamplerYcbcrConversionCreateInfo &
      setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      xChromaOffset = xChromaOffset_;
      return *this;
    }

    SamplerYcbcrConversionCreateInfo &
      setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      yChromaOffset = yChromaOffset_;
      return *this;
    }

    SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
    {
      chromaFilter = chromaFilter_;
      return *this;
    }

    SamplerYcbcrConversionCreateInfo &
      setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
    {
      forceExplicitReconstruction = forceExplicitReconstruction_;
      return *this;
    }

    operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( this );
    }

    operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default;
#else
    bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) &&
             ( ycbcrModel == rhs.ycbcrModel ) && ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) &&
             ( xChromaOffset == rhs.xChromaOffset ) && ( yChromaOffset == rhs.yChromaOffset ) &&
             ( chromaFilter == rhs.chromaFilter ) && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
    }

    bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType  = StructureType::eSamplerYcbcrConversionCreateInfo;
    const void *                                      pNext  = {};
    VULKAN_HPP_NAMESPACE::Format                      format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel =
      VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange    = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
    VULKAN_HPP_NAMESPACE::ComponentMapping  components    = {};
    VULKAN_HPP_NAMESPACE::ChromaLocation    xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
    VULKAN_HPP_NAMESPACE::ChromaLocation    yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
    VULKAN_HPP_NAMESPACE::Filter            chromaFilter  = VULKAN_HPP_NAMESPACE::Filter::eNearest;
    VULKAN_HPP_NAMESPACE::Bool32            forceExplicitReconstruction = {};
  };
  static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
  {
    using Type = SamplerYcbcrConversionCreateInfo;
  };
  using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
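
  // Usage sketch (not part of the generated API): assumes the default namespace alias vk
  // and a vk::Device `device`; the chosen format, model, and range are examples only and
  // must match what the implementation reports for the image being sampled:
  //
  //   vk::SamplerYcbcrConversionCreateInfo conversionInfo( vk::Format::eG8B8R82Plane420Unorm,
  //                                                        vk::SamplerYcbcrModelConversion::eYcbcr709,
  //                                                        vk::SamplerYcbcrRange::eItuNarrow );
  //   vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );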

  class SamplerYcbcrConversion
  {
  public:
    using CType = VkSamplerYcbcrConversion;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;

  public:
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default;
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT
      SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
      : m_samplerYcbcrConversion( samplerYcbcrConversion )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
    {
      m_samplerYcbcrConversion = samplerYcbcrConversion;
      return *this;
    }
#endif

    SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_samplerYcbcrConversion = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerYcbcrConversion const & ) const = default;
#else
    bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
    }

    bool operator!=( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
    }

    bool operator<( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
    {
      return m_samplerYcbcrConversion;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_samplerYcbcrConversion != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_samplerYcbcrConversion == VK_NULL_HANDLE;
    }

  private:
    VkSamplerYcbcrConversion m_samplerYcbcrConversion = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSamplerYcbcrConversion>
  {
    using type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion>
  {
    using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion>
  {
    using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
  using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;

  struct SemaphoreCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo &
                            operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
      return *this;
    }

    SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreCreateInfo *>( this );
    }

    operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreCreateInfo const & ) const = default;
#else
    bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
    }

    bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType = StructureType::eSemaphoreCreateInfo;
    const void *                               pNext = {};
    VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
  };
  static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
  {
    using Type = SemaphoreCreateInfo;
  };
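
  // Usage sketch (not part of the generated API): a binary semaphore needs no flags, so a
  // default-constructed SemaphoreCreateInfo is usually sufficient; a timeline semaphore is
  // requested by chaining a vk::SemaphoreTypeCreateInfo through pNext. Assumes the default
  // namespace alias vk and a vk::Device `device`:
  //
  //   vk::Semaphore imageAvailable = device.createSemaphore( vk::SemaphoreCreateInfo() );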

  struct ShaderModuleCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_    = {},
                                                 size_t                                        codeSize_ = {},
                                                 const uint32_t * pCode_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , codeSize( codeSize_ )
      , pCode( pCode_ )
    {}

    VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderModuleCreateInfo( *reinterpret_cast<ShaderModuleCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags                         flags_,
                            VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ )
      : flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )  // codeSize is in bytes, the proxy holds 32-bit words
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo &
                            operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>( &rhs );
      return *this;
    }

    ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
    {
      codeSize = codeSize_;
      return *this;
    }

    ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT
    {
      pCode = pCode_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ShaderModuleCreateInfo &
      setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
    {
      codeSize = code_.size() * 4;  // codeSize is in bytes, the proxy holds 32-bit words
      pCode    = code_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderModuleCreateInfo *>( this );
    }

    operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ShaderModuleCreateInfo const & ) const = default;
#else
    bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode );
    }

    bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType           sType    = StructureType::eShaderModuleCreateInfo;
    const void *                                  pNext    = {};
    VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags    = {};
    size_t                                        codeSize = {};
    const uint32_t *                              pCode    = {};
  };
  static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
  {
    using Type = ShaderModuleCreateInfo;
  };
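
  // Usage sketch (not part of the generated API): assumes the default namespace alias vk,
  // a vk::Device `device`, and a hypothetical std::vector<uint32_t> `spirv` holding the
  // SPIR-V words. Note that codeSize is a byte count, so the ArrayProxy constructor and
  // setCode() multiply the word count by 4:
  //
  //   vk::ShaderModuleCreateInfo moduleInfo( {}, spirv );
  //   vk::ShaderModule           shaderModule = device.createShaderModule( moduleInfo );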

  class SurfaceKHR
  {
  public:
    using CType = VkSurfaceKHR;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;

  public:
    VULKAN_HPP_CONSTEXPR         SurfaceKHR() = default;
    VULKAN_HPP_CONSTEXPR         SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
    {
      m_surfaceKHR = surfaceKHR;
      return *this;
    }
#endif

    SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_surfaceKHR = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceKHR const & ) const = default;
#else
    bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_surfaceKHR == rhs.m_surfaceKHR;
    }

    bool operator!=( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_surfaceKHR != rhs.m_surfaceKHR;
    }

    bool operator<( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_surfaceKHR < rhs.m_surfaceKHR;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT
    {
      return m_surfaceKHR;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_surfaceKHR != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_surfaceKHR == VK_NULL_HANDLE;
    }

  private:
    VkSurfaceKHR m_surfaceKHR = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eSurfaceKHR>
  {
    using type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SurfaceKHR>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct SwapchainCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(
      VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_         = {},
      VULKAN_HPP_NAMESPACE::SurfaceKHR              surface_       = {},
      uint32_t                                      minImageCount_ = {},
      VULKAN_HPP_NAMESPACE::Format                  imageFormat_   = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::ColorSpaceKHR   imageColorSpace_  = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
      VULKAN_HPP_NAMESPACE::Extent2D        imageExtent_      = {},
      uint32_t                              imageArrayLayers_ = {},
      VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_       = {},
      VULKAN_HPP_NAMESPACE::SharingMode     imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
      uint32_t                              queueFamilyIndexCount_ = {},
      const uint32_t *                      pQueueFamilyIndices_   = {},
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ =
        VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
      VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ =
        VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
      VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_  = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
      VULKAN_HPP_NAMESPACE::Bool32         clipped_      = {},
      VULKAN_HPP_NAMESPACE::SwapchainKHR   oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , surface( surface_ )
      , minImageCount( minImageCount_ )
      , imageFormat( imageFormat_ )
      , imageColorSpace( imageColorSpace_ )
      , imageExtent( imageExtent_ )
      , imageArrayLayers( imageArrayLayers_ )
      , imageUsage( imageUsage_ )
      , imageSharingMode( imageSharingMode_ )
      , queueFamilyIndexCount( queueFamilyIndexCount_ )
      , pQueueFamilyIndices( pQueueFamilyIndices_ )
      , preTransform( preTransform_ )
      , compositeAlpha( compositeAlpha_ )
      , presentMode( presentMode_ )
      , clipped( clipped_ )
      , oldSwapchain( oldSwapchain_ )
    {}

    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainCreateInfoKHR( *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SwapchainCreateInfoKHR(
      VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR                         flags_,
      VULKAN_HPP_NAMESPACE::SurfaceKHR                                      surface_,
      uint32_t                                                              minImageCount_,
      VULKAN_HPP_NAMESPACE::Format                                          imageFormat_,
      VULKAN_HPP_NAMESPACE::ColorSpaceKHR                                   imageColorSpace_,
      VULKAN_HPP_NAMESPACE::Extent2D                                        imageExtent_,
      uint32_t                                                              imageArrayLayers_,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags                                 imageUsage_,
      VULKAN_HPP_NAMESPACE::SharingMode                                     imageSharingMode_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_,
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR                     preTransform_ =
        VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
      VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ =
        VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
      VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_  = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
      VULKAN_HPP_NAMESPACE::Bool32         clipped_      = {},
      VULKAN_HPP_NAMESPACE::SwapchainKHR   oldSwapchain_ = {} )
      : flags( flags_ )
      , surface( surface_ )
      , minImageCount( minImageCount_ )
      , imageFormat( imageFormat_ )
      , imageColorSpace( imageColorSpace_ )
      , imageExtent( imageExtent_ )
      , imageArrayLayers( imageArrayLayers_ )
      , imageUsage( imageUsage_ )
      , imageSharingMode( imageSharingMode_ )
      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
      , pQueueFamilyIndices( queueFamilyIndices_.data() )
      , preTransform( preTransform_ )
      , compositeAlpha( compositeAlpha_ )
      , presentMode( presentMode_ )
      , clipped( clipped_ )
      , oldSwapchain( oldSwapchain_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR &
                            operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>( &rhs );
      return *this;
    }

    SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }

    SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      minImageCount = minImageCount_;
      return *this;
    }

    SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      imageFormat = imageFormat_;
      return *this;
    }

    SwapchainCreateInfoKHR &
      setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
    {
      imageColorSpace = imageColorSpace_;
      return *this;
    }

    SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }

    SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
    {
      imageArrayLayers = imageArrayLayers_;
      return *this;
    }

    SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
    {
      imageUsage = imageUsage_;
      return *this;
    }

    SwapchainCreateInfoKHR &
      setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      imageSharingMode = imageSharingMode_;
      return *this;
    }

    SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SwapchainCreateInfoKHR & setQueueFamilyIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices   = queueFamilyIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SwapchainCreateInfoKHR &
      setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
    {
      preTransform = preTransform_;
      return *this;
    }

    SwapchainCreateInfoKHR &
      setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
    {
      compositeAlpha = compositeAlpha_;
      return *this;
    }

    SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
    {
      presentMode = presentMode_;
      return *this;
    }

    SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
    {
      clipped = clipped_;
      return *this;
    }

    SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      oldSwapchain = oldSwapchain_;
      return *this;
    }

    operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainCreateInfoKHR *>( this );
    }

    operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainCreateInfoKHR const & ) const = default;
#else
    bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) &&
             ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) &&
             ( imageColorSpace == rhs.imageColorSpace ) && ( imageExtent == rhs.imageExtent ) &&
             ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) &&
             ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
             ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) &&
             ( compositeAlpha == rhs.compositeAlpha ) && ( presentMode == rhs.presentMode ) &&
             ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain );
    }

    bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType           sType           = StructureType::eSwapchainCreateInfoKHR;
    const void *                                  pNext           = {};
    VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags           = {};
    VULKAN_HPP_NAMESPACE::SurfaceKHR              surface         = {};
    uint32_t                                      minImageCount   = {};
    VULKAN_HPP_NAMESPACE::Format                  imageFormat     = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ColorSpaceKHR           imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
    VULKAN_HPP_NAMESPACE::Extent2D                imageExtent     = {};
    uint32_t                                      imageArrayLayers      = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags         imageUsage            = {};
    VULKAN_HPP_NAMESPACE::SharingMode             imageSharingMode      = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
    uint32_t                                      queueFamilyIndexCount = {};
    const uint32_t *                              pQueueFamilyIndices   = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform =
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha =
      VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
    VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode  = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
    VULKAN_HPP_NAMESPACE::Bool32         clipped      = {};
    VULKAN_HPP_NAMESPACE::SwapchainKHR   oldSwapchain = {};
  };
  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
  {
    using Type = SwapchainCreateInfoKHR;
  };
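
  // Usage sketch (not part of the generated API): assumes the default namespace alias vk,
  // a vk::Device `device`, and a vk::SurfaceKHR `surface` created elsewhere; the format,
  // extent, and image count are examples and should come from the surface capabilities:
  //
  //   vk::SwapchainCreateInfoKHR swapchainInfo;
  //   swapchainInfo.setSurface( surface )
  //                .setMinImageCount( 3 )
  //                .setImageFormat( vk::Format::eB8G8R8A8Srgb )
  //                .setImageColorSpace( vk::ColorSpaceKHR::eSrgbNonlinear )
  //                .setImageExtent( { 1280, 720 } )
  //                .setImageArrayLayers( 1 )
  //                .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
  //                .setPresentMode( vk::PresentModeKHR::eFifo );
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainInfo );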

  struct ValidationCacheCreateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
                                                       size_t       initialDataSize_                              = {},
                                                       const void * pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , initialDataSize( initialDataSize_ )
      , pInitialData( pInitialData_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT            flags_,
                                  VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
      : flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT &
                            operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
      return *this;
    }

    ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ValidationCacheCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      initialDataSize = initialDataSize_;
      return *this;
    }

    ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT
    {
      pInitialData = pInitialData_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    ValidationCacheCreateInfoEXT &
      setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
    {
      initialDataSize = initialData_.size() * sizeof( T );
      pInitialData    = initialData_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( this );
    }

    operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkValidationCacheCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default;
#else
    bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( initialDataSize == rhs.initialDataSize ) && ( pInitialData == rhs.pInitialData );
    }

    bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType           = StructureType::eValidationCacheCreateInfoEXT;
    const void *                                        pNext           = {};
    VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags           = {};
    size_t                                              initialDataSize = {};
    const void *                                        pInitialData    = {};
  };
  static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ValidationCacheCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
  {
    using Type = ValidationCacheCreateInfoEXT;
  };
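
  // Usage sketch (not part of the generated API): assumes the default namespace alias vk,
  // a vk::Device `device` with VK_EXT_validation_cache enabled, and a hypothetical
  // std::vector<uint8_t> `blob` previously retrieved via Device::getValidationCacheDataEXT.
  // The explicit template argument is needed because T is not deduced from the container:
  //
  //   vk::ValidationCacheCreateInfoEXT cacheInfo;
  //   cacheInfo.setInitialData<uint8_t>( blob );  // initialDataSize becomes blob.size() * sizeof( uint8_t )
  //   vk::ValidationCacheEXT cache = device.createValidationCacheEXT( cacheInfo );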

  class ValidationCacheEXT
  {
  public:
    using CType = VkValidationCacheEXT;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;

  public:
    VULKAN_HPP_CONSTEXPR         ValidationCacheEXT() = default;
    VULKAN_HPP_CONSTEXPR         ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
      : m_validationCacheEXT( validationCacheEXT )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
    {
      m_validationCacheEXT = validationCacheEXT;
      return *this;
    }
#endif

    ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_validationCacheEXT = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ValidationCacheEXT const & ) const = default;
#else
    bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_validationCacheEXT == rhs.m_validationCacheEXT;
    }

    bool operator!=( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_validationCacheEXT != rhs.m_validationCacheEXT;
    }

    bool operator<( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_validationCacheEXT < rhs.m_validationCacheEXT;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
    {
      return m_validationCacheEXT;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_validationCacheEXT != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_validationCacheEXT == VK_NULL_HANDLE;
    }

  private:
    VkValidationCacheEXT m_validationCacheEXT = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eValidationCacheEXT>
  {
    using type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT>
  {
    using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT>
  {
    using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoProfileKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoProfileKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ =
                         VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid,
                       VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {},
                       VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_      = {},
                       VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {} ) VULKAN_HPP_NOEXCEPT
      : videoCodecOperation( videoCodecOperation_ )
      , chromaSubsampling( chromaSubsampling_ )
      , lumaBitDepth( lumaBitDepth_ )
      , chromaBitDepth( chromaBitDepth_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoProfileKHR( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoProfileKHR( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoProfileKHR( *reinterpret_cast<VideoProfileKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoProfileKHR & operator=( VideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoProfileKHR & operator=( VkVideoProfileKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfileKHR const *>( &rhs );
      return *this;
    }

    VideoProfileKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoProfileKHR & setVideoCodecOperation(
      VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT
    {
      videoCodecOperation = videoCodecOperation_;
      return *this;
    }

    VideoProfileKHR & setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ )
      VULKAN_HPP_NOEXCEPT
    {
      chromaSubsampling = chromaSubsampling_;
      return *this;
    }

    VideoProfileKHR &
      setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      lumaBitDepth = lumaBitDepth_;
      return *this;
    }

    VideoProfileKHR &
      setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT
    {
      chromaBitDepth = chromaBitDepth_;
      return *this;
    }

    operator VkVideoProfileKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoProfileKHR *>( this );
    }

    operator VkVideoProfileKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoProfileKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoProfileKHR const & ) const = default;
#  else
    bool operator==( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) &&
             ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) &&
             ( chromaBitDepth == rhs.chromaBitDepth );
    }

    bool operator!=( VideoProfileKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType = StructureType::eVideoProfileKHR;
    void *                                               pNext = {};
    VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation =
      VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eInvalid;
    VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {};
    VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth      = {};
    VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth    = {};
  };
  static_assert( sizeof( VideoProfileKHR ) == sizeof( VkVideoProfileKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoProfileKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoProfileKHR>
  {
    using Type = VideoProfileKHR;
  };
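
  // Usage sketch (illustrative, VK_ENABLE_BETA_EXTENSIONS only): VideoProfileKHR is typically filled via
  // the chained setters; codecOperation, chromaSubsampling, lumaBitDepth and chromaBitDepth are flag
  // values chosen by the application, not defaults of this header.
  //
  //   vk::VideoProfileKHR profile = vk::VideoProfileKHR()
  //                                   .setVideoCodecOperation( codecOperation )
  //                                   .setChromaSubsampling( chromaSubsampling )
  //                                   .setLumaBitDepth( lumaBitDepth )
  //                                   .setChromaBitDepth( chromaBitDepth );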
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoSessionCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoSessionCreateInfoKHR(
      uint32_t                                         queueFamilyIndex_ = {},
      VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_            = {},
      const VULKAN_HPP_NAMESPACE::VideoProfileKHR *    pVideoProfile_    = {},
      VULKAN_HPP_NAMESPACE::Format                     pictureFormat_    = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::Extent2D                   maxCodedExtent_   = {},
      VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_              = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      uint32_t                     maxReferencePicturesSlotsCount_       = {},
      uint32_t                     maxReferencePicturesActiveCount_      = {} ) VULKAN_HPP_NOEXCEPT
      : queueFamilyIndex( queueFamilyIndex_ )
      , flags( flags_ )
      , pVideoProfile( pVideoProfile_ )
      , pictureFormat( pictureFormat_ )
      , maxCodedExtent( maxCodedExtent_ )
      , referencePicturesFormat( referencePicturesFormat_ )
      , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ )
      , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoSessionCreateInfoKHR( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoSessionCreateInfoKHR( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoSessionCreateInfoKHR( *reinterpret_cast<VideoSessionCreateInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR &
                            operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const *>( &rhs );
      return *this;
    }

    VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoSessionCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    VideoSessionCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VideoSessionCreateInfoKHR &
      setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT
    {
      pVideoProfile = pVideoProfile_;
      return *this;
    }

    VideoSessionCreateInfoKHR & setPictureFormat( VULKAN_HPP_NAMESPACE::Format pictureFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      pictureFormat = pictureFormat_;
      return *this;
    }

    VideoSessionCreateInfoKHR &
      setMaxCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxCodedExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      maxCodedExtent = maxCodedExtent_;
      return *this;
    }

    VideoSessionCreateInfoKHR &
      setReferencePicturesFormat( VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      referencePicturesFormat = referencePicturesFormat_;
      return *this;
    }

    VideoSessionCreateInfoKHR &
      setMaxReferencePicturesSlotsCount( uint32_t maxReferencePicturesSlotsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxReferencePicturesSlotsCount = maxReferencePicturesSlotsCount_;
      return *this;
    }

    VideoSessionCreateInfoKHR &
      setMaxReferencePicturesActiveCount( uint32_t maxReferencePicturesActiveCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxReferencePicturesActiveCount = maxReferencePicturesActiveCount_;
      return *this;
    }

    operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( this );
    }

    operator VkVideoSessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoSessionCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default;
#  else
    bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
             ( flags == rhs.flags ) && ( pVideoProfile == rhs.pVideoProfile ) &&
             ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) &&
             ( referencePicturesFormat == rhs.referencePicturesFormat ) &&
             ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) &&
             ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount );
    }

    bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType            = StructureType::eVideoSessionCreateInfoKHR;
    const void *                                     pNext            = {};
    uint32_t                                         queueFamilyIndex = {};
    VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags            = {};
    const VULKAN_HPP_NAMESPACE::VideoProfileKHR *    pVideoProfile    = {};
    VULKAN_HPP_NAMESPACE::Format                     pictureFormat    = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::Extent2D                   maxCodedExtent   = {};
    VULKAN_HPP_NAMESPACE::Format                     referencePicturesFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    uint32_t                                         maxReferencePicturesSlotsCount  = {};
    uint32_t                                         maxReferencePicturesActiveCount = {};
  };
  static_assert( sizeof( VideoSessionCreateInfoKHR ) == sizeof( VkVideoSessionCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoSessionCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoSessionCreateInfoKHR>
  {
    using Type = VideoSessionCreateInfoKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoSessionParametersCreateInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoSessionParametersCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {},
      VULKAN_HPP_NAMESPACE::VideoSessionKHR           videoSession_                   = {} ) VULKAN_HPP_NOEXCEPT
      : videoSessionParametersTemplate( videoSessionParametersTemplate_ )
      , videoSession( videoSession_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VideoSessionParametersCreateInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    VideoSessionParametersCreateInfoKHR( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoSessionParametersCreateInfoKHR( *reinterpret_cast<VideoSessionParametersCreateInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR &
                            operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoSessionParametersCreateInfoKHR &
      operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const *>( &rhs );
      return *this;
    }

    VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoSessionParametersCreateInfoKHR & setVideoSessionParametersTemplate(
      VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ ) VULKAN_HPP_NOEXCEPT
    {
      videoSessionParametersTemplate = videoSessionParametersTemplate_;
      return *this;
    }

    VideoSessionParametersCreateInfoKHR &
      setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT
    {
      videoSession = videoSession_;
      return *this;
    }

    operator VkVideoSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( this );
    }

    operator VkVideoSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoSessionParametersCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default;
#  else
    bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) &&
             ( videoSession == rhs.videoSession );
    }

    bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType             sType = StructureType::eVideoSessionParametersCreateInfoKHR;
    const void *                                    pNext = {};
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate = {};
    VULKAN_HPP_NAMESPACE::VideoSessionKHR           videoSession                   = {};
  };
  static_assert( sizeof( VideoSessionParametersCreateInfoKHR ) == sizeof( VkVideoSessionParametersCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoSessionParametersCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoSessionParametersCreateInfoKHR>
  {
    using Type = VideoSessionParametersCreateInfoKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  struct DisplayPowerInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ =
                                                VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT
      : powerState( powerState_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPowerInfoEXT( *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT &
                            operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>( &rhs );
      return *this;
    }

    DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
    {
      powerState = powerState_;
      return *this;
    }

    operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPowerInfoEXT *>( this );
    }

    operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPowerInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPowerInfoEXT const & ) const = default;
#else
    bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState );
    }

    bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType      = StructureType::eDisplayPowerInfoEXT;
    const void *                               pNext      = {};
    VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
  };
  static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
  {
    using Type = DisplayPowerInfoEXT;
  };
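
  // Usage sketch (illustrative): with VK_EXT_display_control enabled, DisplayPowerInfoEXT is passed to
  // the display power control entry point (vkDisplayPowerControlEXT); `device` and `display` are assumed
  // to be valid vk::Device and vk::DisplayKHR handles.
  //
  //   vk::DisplayPowerInfoEXT powerInfo( vk::DisplayPowerStateEXT::eOn );
  //   device.displayPowerControlEXT( display, powerInfo );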

  struct MappedMemoryRange
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize   offset_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize   size_   = {} ) VULKAN_HPP_NOEXCEPT
      : memory( memory_ )
      , offset( offset_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
      : MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange &
                            operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>( &rhs );
      return *this;
    }

    MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
    {
      size = size_;
      return *this;
    }

    operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMappedMemoryRange *>( this );
    }

    operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMappedMemoryRange *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MappedMemoryRange const & ) const = default;
#else
    bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) &&
             ( size == rhs.size );
    }

    bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eMappedMemoryRange;
    const void *                        pNext  = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory  memory = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    offset = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    size   = {};
  };
  static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMappedMemoryRange>
  {
    using Type = MappedMemoryRange;
  };
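
  // Usage sketch (illustrative): MappedMemoryRange describes a range of a non-coherent, host-mapped
  // allocation to flush or invalidate; `device`, `memory` and `allocationSize` are assumed to come from
  // the application's own allocation code.
  //
  //   vk::MappedMemoryRange range = vk::MappedMemoryRange()
  //                                   .setMemory( memory )
  //                                   .setOffset( 0 )
  //                                   .setSize( allocationSize );
  //   device.flushMappedMemoryRanges( range );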

  struct MemoryRequirements
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_           = {},
                                             VULKAN_HPP_NAMESPACE::DeviceSize alignment_      = {},
                                             uint32_t                         memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : size( size_ )
      , alignment( alignment_ )
      , memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryRequirements &
                            operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
      return *this;
    }

    operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryRequirements *>( this );
    }

    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryRequirements *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryRequirements const & ) const = default;
#else
    bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits );
    }

    bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize size           = {};
    VULKAN_HPP_NAMESPACE::DeviceSize alignment      = {};
    uint32_t                         memoryTypeBits = {};
  };
  static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );

  struct MemoryRequirements2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryRequirements( memoryRequirements_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryRequirements2 &
                            operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
      return *this;
    }

    operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryRequirements2 *>( this );
    }

    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryRequirements2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryRequirements2 const & ) const = default;
#else
    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
    }

    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType              = StructureType::eMemoryRequirements2;
    void *                                   pNext              = {};
    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
  };
  static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryRequirements2>
  {
    using Type = MemoryRequirements2;
  };
  using MemoryRequirements2KHR = MemoryRequirements2;
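
  // Usage sketch (illustrative): MemoryRequirements2 is normally obtained from one of the
  // *MemoryRequirements2 queries rather than filled by hand; `device` and `buffer` are assumed to exist.
  //
  //   vk::MemoryRequirements2 req =
  //     device.getBufferMemoryRequirements2( vk::BufferMemoryRequirementsInfo2( buffer ) );
  //   vk::DeviceSize size = req.memoryRequirements.size;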

  struct DeviceGroupPresentCapabilitiesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDeviceGroupPresentCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(
      std::array<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const & presentMask_ = {},
      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR   modes_       = {} ) VULKAN_HPP_NOEXCEPT
      : presentMask( presentMask_ )
      , modes( modes_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR &
                            operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR *>( this );
    }

    operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default;
#else
    bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) &&
             ( modes == rhs.modes );
    }

    bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR                     modes       = {};
  };
  static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
  {
    using Type = DeviceGroupPresentCapabilitiesKHR;
  };
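
  // Usage sketch (illustrative): DeviceGroupPresentCapabilitiesKHR is a query result, filled by the
  // device-level query (vkGetDeviceGroupPresentCapabilitiesKHR), e.g.
  //
  //   vk::DeviceGroupPresentCapabilitiesKHR caps = device.getGroupPresentCapabilitiesKHR();
  //   uint32_t firstDeviceMask = caps.presentMask[0];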

  struct PhysicalDeviceSurfaceInfo2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT
      : surface( surface_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR &
                            operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }

    operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( this );
    }

    operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface );
    }

    bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType   = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
    const void *                        pNext   = {};
    VULKAN_HPP_NAMESPACE::SurfaceKHR    surface = {};
  };
  static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
  {
    using Type = PhysicalDeviceSurfaceInfo2KHR;
  };
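
  // Usage sketch (illustrative): PhysicalDeviceSurfaceInfo2KHR wraps the surface parameter of the
  // VK_KHR_get_surface_capabilities2 queries; `physicalDevice` and `surface` are assumed to exist.
  //
  //   vk::SurfaceCapabilities2KHR caps =
  //     physicalDevice.getSurfaceCapabilities2KHR( vk::PhysicalDeviceSurfaceInfo2KHR( surface ) );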

  struct DeviceMemoryOpaqueCaptureAddressInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT
      : memory( memory_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast<DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo &
                            operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceMemoryOpaqueCaptureAddressInfo &
      operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
      return *this;
    }

    DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
    }

    operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default;
#else
    bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
    }

    bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
    const void *                        pNext  = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory  memory = {};
  };
  static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceMemoryOpaqueCaptureAddressInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
  {
    using Type = DeviceMemoryOpaqueCaptureAddressInfo;
  };
  using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
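
  // Usage sketch (illustrative): used with the Vulkan 1.2 / VK_KHR_buffer_device_address opaque capture
  // address query (vkGetDeviceMemoryOpaqueCaptureAddress); `device` and `memory` are assumed to exist.
  //
  //   uint64_t address =
  //     device.getMemoryOpaqueCaptureAddress( vk::DeviceMemoryOpaqueCaptureAddressInfo( memory ) );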

  struct PresentInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t                                   waitSemaphoreCount_ = {},
                                         const VULKAN_HPP_NAMESPACE::Semaphore *    pWaitSemaphores_    = {},
                                         uint32_t                                   swapchainCount_     = {},
                                         const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_        = {},
                                         const uint32_t *                           pImageIndices_      = {},
                                         VULKAN_HPP_NAMESPACE::Result *             pResults_ = {} ) VULKAN_HPP_NOEXCEPT
      : waitSemaphoreCount( waitSemaphoreCount_ )
      , pWaitSemaphores( pWaitSemaphores_ )
      , swapchainCount( swapchainCount_ )
      , pSwapchains( pSwapchains_ )
      , pImageIndices( pImageIndices_ )
      , pResults( pResults_ )
    {}

    VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const &    waitSemaphores_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const &               imageIndices_           = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_                = {} )
      : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
      , pWaitSemaphores( waitSemaphores_.data() )
      , swapchainCount( static_cast<uint32_t>( swapchains_.size() ) )
      , pSwapchains( swapchains_.data() )
      , pImageIndices( imageIndices_.data() )
      , pResults( results_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
      VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
      VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
#    else
      if ( swapchains_.size() != imageIndices_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
      }
      if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
      }
      if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>( &rhs );
      return *this;
    }

    PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphores = pWaitSemaphores_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR & setWaitSemaphores(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ )
      VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
      pWaitSemaphores    = waitSemaphores_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT
    {
      pSwapchains = pSwapchains_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentInfoKHR & setSwapchains(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ )
      VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( swapchains_.size() );
      pSwapchains    = swapchains_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pImageIndices = pImageIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Note: in VkPresentInfoKHR, pImageIndices shares swapchainCount as its element count,
    // so this setter updates swapchainCount rather than a separate count member.
    PresentInfoKHR & setImageIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
      pImageIndices  = imageIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT
    {
      pResults = pResults_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Note: pResults is also counted by swapchainCount (one result per swapchain), hence the update here.
    PresentInfoKHR & setResults(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( results_.size() );
      pResults       = results_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentInfoKHR *>( this );
    }

    operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentInfoKHR const & ) const = default;
#else
    bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
             ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) &&
             ( pSwapchains == rhs.pSwapchains ) && ( pImageIndices == rhs.pImageIndices ) &&
             ( pResults == rhs.pResults );
    }

    bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType              = StructureType::ePresentInfoKHR;
    const void *                               pNext              = {};
    uint32_t                                   waitSemaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *    pWaitSemaphores    = {};
    uint32_t                                   swapchainCount     = {};
    const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains        = {};
    const uint32_t *                           pImageIndices      = {};
    VULKAN_HPP_NAMESPACE::Result *             pResults           = {};
  };
  static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePresentInfoKHR>
  {
    using Type = PresentInfoKHR;
  };
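
  // Usage sketch (illustrative): in enhanced mode the ArrayProxy constructor pairs each swapchain with an
  // image index (and optionally a per-swapchain result).  With named lvalues `renderFinished`,
  // `swapchain`, `imageIndex` and a vk::Queue `queue` owned by the application:
  //
  //   vk::PresentInfoKHR presentInfo( renderFinished, swapchain, imageIndex );
  //   vk::Result result = queue.presentKHR( presentInfo );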

  struct SubmitInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SubmitInfo( uint32_t                                         waitSemaphoreCount_   = {},
                  const VULKAN_HPP_NAMESPACE::Semaphore *          pWaitSemaphores_      = {},
                  const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_    = {},
                  uint32_t                                         commandBufferCount_   = {},
                  const VULKAN_HPP_NAMESPACE::CommandBuffer *      pCommandBuffers_      = {},
                  uint32_t                                         signalSemaphoreCount_ = {},
                  const VULKAN_HPP_NAMESPACE::Semaphore *          pSignalSemaphores_    = {} ) VULKAN_HPP_NOEXCEPT
      : waitSemaphoreCount( waitSemaphoreCount_ )
      , pWaitSemaphores( pWaitSemaphores_ )
      , pWaitDstStageMask( pWaitDstStageMask_ )
      , commandBufferCount( commandBufferCount_ )
      , pCommandBuffers( pCommandBuffers_ )
      , signalSemaphoreCount( signalSemaphoreCount_ )
      , pSignalSemaphores( pSignalSemaphores_ )
    {}

    VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubmitInfo( *reinterpret_cast<SubmitInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubmitInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const &
        waitDstStageMask_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const &
        commandBuffers_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const &
        signalSemaphores_ = {} )
      : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) )
      , pWaitSemaphores( waitSemaphores_.data() )
      , pWaitDstStageMask( waitDstStageMask_.data() )
      , commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) )
      , pCommandBuffers( commandBuffers_.data() )
      , signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) )
      , pSignalSemaphores( signalSemaphores_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() );
#    else
      if ( waitSemaphores_.size() != waitDstStageMask_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>( &rhs );
      return *this;
    }

    SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphores = pWaitSemaphores_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubmitInfo & setWaitSemaphores(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ )
      VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
      pWaitSemaphores    = waitSemaphores_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubmitInfo &
      setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitDstStageMask = pWaitDstStageMask_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Note: pWaitDstStageMask provides one stage mask per wait semaphore, so its length is
    // waitSemaphoreCount; this setter therefore updates waitSemaphoreCount.
    SubmitInfo & setWaitDstStageMask(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const &
        waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
      pWaitDstStageMask  = waitDstStageMask_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount = commandBufferCount_;
      return *this;
    }

    SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
    {
      pCommandBuffers = pCommandBuffers_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubmitInfo & setCommandBuffers(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ )
      VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
      pCommandBuffers    = commandBuffers_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = signalSemaphoreCount_;
      return *this;
    }

    SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphores = pSignalSemaphores_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubmitInfo & setSignalSemaphores(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ )
      VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
      pSignalSemaphores    = signalSemaphores_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubmitInfo *>( this );
    }

    operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubmitInfo const & ) const = default;
#else
    bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
             ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) &&
             ( commandBufferCount == rhs.commandBufferCount ) && ( pCommandBuffers == rhs.pCommandBuffers ) &&
             ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && ( pSignalSemaphores == rhs.pSignalSemaphores );
    }

    bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType                = StructureType::eSubmitInfo;
    const void *                                     pNext                = {};
    uint32_t                                         waitSemaphoreCount   = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *          pWaitSemaphores      = {};
    const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask    = {};
    uint32_t                                         commandBufferCount   = {};
    const VULKAN_HPP_NAMESPACE::CommandBuffer *      pCommandBuffers      = {};
    uint32_t                                         signalSemaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *          pSignalSemaphores    = {};
  };
  static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubmitInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSubmitInfo>
  {
    using Type = SubmitInfo;
  };
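
  // Usage sketch (illustrative): in enhanced mode the ArrayProxy constructor keeps waitSemaphores and
  // waitDstStageMask in lock-step (one stage mask per wait semaphore).  `imageAvailable`, `waitStage`,
  // `commandBuffer`, `renderFinished`, `queue` and `fence` are assumed to be named lvalues owned by the
  // application.
  //
  //   vk::SubmitInfo submitInfo( imageAvailable, waitStage, commandBuffer, renderFinished );
  //   queue.submit( submitInfo, fence );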

  struct SemaphoreSubmitInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore              semaphore_ = {},
                                                 uint64_t                                     value_     = {},
                                                 VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ = {},
                                                 uint32_t deviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ )
      , value( value_ )
      , stageMask( stageMask_ )
      , deviceIndex( deviceIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfoKHR( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreSubmitInfoKHR( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreSubmitInfoKHR( *reinterpret_cast<SemaphoreSubmitInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfoKHR &
                            operator=( SemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreSubmitInfoKHR & operator=( VkSemaphoreSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR const *>( &rhs );
      return *this;
    }

    SemaphoreSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SemaphoreSubmitInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    SemaphoreSubmitInfoKHR & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
    {
      value = value_;
      return *this;
    }

    SemaphoreSubmitInfoKHR & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask_ ) VULKAN_HPP_NOEXCEPT
    {
      stageMask = stageMask_;
      return *this;
    }

    SemaphoreSubmitInfoKHR & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceIndex = deviceIndex_;
      return *this;
    }

    operator VkSemaphoreSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreSubmitInfoKHR *>( this );
    }

    operator VkSemaphoreSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreSubmitInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreSubmitInfoKHR const & ) const = default;
#else
    bool operator==( SemaphoreSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
             ( value == rhs.value ) && ( stageMask == rhs.stageMask ) && ( deviceIndex == rhs.deviceIndex );
    }

    bool operator!=( SemaphoreSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType       = StructureType::eSemaphoreSubmitInfoKHR;
    const void *                                 pNext       = {};
    VULKAN_HPP_NAMESPACE::Semaphore              semaphore   = {};
    uint64_t                                     value       = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask   = {};
    uint32_t                                     deviceIndex = {};
  };
  static_assert( sizeof( SemaphoreSubmitInfoKHR ) == sizeof( VkSemaphoreSubmitInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SemaphoreSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreSubmitInfoKHR>
  {
    using Type = SemaphoreSubmitInfoKHR;
  };
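
  // Usage sketch (illustrative only, not part of the generated API; assumes the default "vk"
  // namespace and a semaphore handle named renderFinished created elsewhere): a
  // SemaphoreSubmitInfoKHR can be filled either through its constructor or through the chained
  // setters above.
  //
  //   auto waitInfo = vk::SemaphoreSubmitInfoKHR{}
  //                     .setSemaphore( renderFinished )   // renderFinished: assumed vk::Semaphore
  //                     .setValue( 0 )                    // only consulted for timeline semaphores
  //                     .setStageMask( vk::PipelineStageFlagBits2KHR::eAllCommands );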

  struct SubmitInfo2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubmitInfo2KHR(
      VULKAN_HPP_NAMESPACE::SubmitFlagsKHR                     flags_                    = {},
      uint32_t                                                 waitSemaphoreInfoCount_   = {},
      const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR *     pWaitSemaphoreInfos_      = {},
      uint32_t                                                 commandBufferInfoCount_   = {},
      const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR * pCommandBufferInfos_      = {},
      uint32_t                                                 signalSemaphoreInfoCount_ = {},
      const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR *     pSignalSemaphoreInfos_    = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , waitSemaphoreInfoCount( waitSemaphoreInfoCount_ )
      , pWaitSemaphoreInfos( pWaitSemaphoreInfos_ )
      , commandBufferInfoCount( commandBufferInfoCount_ )
      , pCommandBufferInfos( pCommandBufferInfos_ )
      , signalSemaphoreInfoCount( signalSemaphoreInfoCount_ )
      , pSignalSemaphoreInfos( pSignalSemaphoreInfos_ )
    {}

    VULKAN_HPP_CONSTEXPR SubmitInfo2KHR( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubmitInfo2KHR( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubmitInfo2KHR( *reinterpret_cast<SubmitInfo2KHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubmitInfo2KHR(
      VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const &
        waitSemaphoreInfos_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR> const &
        commandBufferInfos_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const &
        signalSemaphoreInfos_ = {} )
      : flags( flags_ )
      , waitSemaphoreInfoCount( static_cast<uint32_t>( waitSemaphoreInfos_.size() ) )
      , pWaitSemaphoreInfos( waitSemaphoreInfos_.data() )
      , commandBufferInfoCount( static_cast<uint32_t>( commandBufferInfos_.size() ) )
      , pCommandBufferInfos( commandBufferInfos_.data() )
      , signalSemaphoreInfoCount( static_cast<uint32_t>( signalSemaphoreInfos_.size() ) )
      , pSignalSemaphoreInfos( signalSemaphoreInfos_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubmitInfo2KHR & operator=( SubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubmitInfo2KHR & operator=( VkSubmitInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo2KHR const *>( &rhs );
      return *this;
    }

    SubmitInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SubmitInfo2KHR & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    SubmitInfo2KHR & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreInfoCount = waitSemaphoreInfoCount_;
      return *this;
    }

    SubmitInfo2KHR & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pWaitSemaphoreInfos_ )
      VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphoreInfos = pWaitSemaphoreInfos_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubmitInfo2KHR & setWaitSemaphoreInfos(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const &
        waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreInfoCount = static_cast<uint32_t>( waitSemaphoreInfos_.size() );
      pWaitSemaphoreInfos    = waitSemaphoreInfos_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubmitInfo2KHR & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferInfoCount = commandBufferInfoCount_;
      return *this;
    }

    SubmitInfo2KHR & setPCommandBufferInfos(
      const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pCommandBufferInfos = pCommandBufferInfos_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubmitInfo2KHR & setCommandBufferInfos(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR> const &
        commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferInfoCount = static_cast<uint32_t>( commandBufferInfos_.size() );
      pCommandBufferInfos    = commandBufferInfos_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SubmitInfo2KHR & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreInfoCount = signalSemaphoreInfoCount_;
      return *this;
    }

    SubmitInfo2KHR & setPSignalSemaphoreInfos(
      const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphoreInfos = pSignalSemaphoreInfos_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SubmitInfo2KHR & setSignalSemaphoreInfos(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR> const &
        signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreInfoCount = static_cast<uint32_t>( signalSemaphoreInfos_.size() );
      pSignalSemaphoreInfos    = signalSemaphoreInfos_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSubmitInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubmitInfo2KHR *>( this );
    }

    operator VkSubmitInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubmitInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubmitInfo2KHR const & ) const = default;
#else
    bool operator==( SubmitInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) &&
             ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) &&
             ( commandBufferInfoCount == rhs.commandBufferInfoCount ) &&
             ( pCommandBufferInfos == rhs.pCommandBufferInfos ) &&
             ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) &&
             ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos );
    }

    bool operator!=( SubmitInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                      sType                    = StructureType::eSubmitInfo2KHR;
    const void *                                             pNext                    = {};
    VULKAN_HPP_NAMESPACE::SubmitFlagsKHR                     flags                    = {};
    uint32_t                                                 waitSemaphoreInfoCount   = {};
    const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR *     pWaitSemaphoreInfos      = {};
    uint32_t                                                 commandBufferInfoCount   = {};
    const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR * pCommandBufferInfos      = {};
    uint32_t                                                 signalSemaphoreInfoCount = {};
    const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR *     pSignalSemaphoreInfos    = {};
  };
  static_assert( sizeof( SubmitInfo2KHR ) == sizeof( VkSubmitInfo2KHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubmitInfo2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSubmitInfo2KHR>
  {
    using Type = SubmitInfo2KHR;
  };
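
  // Usage sketch (illustrative only, not part of the generated API; assumes enhanced mode, the
  // default "vk" namespace, and waitInfo / signalInfo / commandBuffer created elsewhere): the
  // ArrayProxyNoTemporaries constructor derives waitSemaphoreInfoCount / pWaitSemaphoreInfos and
  // friends from named containers, so the count and pointer members never need to be set by hand.
  //
  //   std::array<vk::SemaphoreSubmitInfoKHR, 1>     waitInfos{ waitInfo };
  //   std::array<vk::CommandBufferSubmitInfoKHR, 1> cmdInfos{ vk::CommandBufferSubmitInfoKHR{ commandBuffer } };
  //   std::array<vk::SemaphoreSubmitInfoKHR, 1>     signalInfos{ signalInfo };
  //   vk::SubmitInfo2KHR submitInfo( {}, waitInfos, cmdInfos, signalInfos );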

  class Queue
  {
  public:
    using CType = VkQueue;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;

  public:
    VULKAN_HPP_CONSTEXPR         Queue() = default;
    VULKAN_HPP_CONSTEXPR         Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT
    {
      m_queue = queue;
      return *this;
    }
#endif

    Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_queue = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Queue const & ) const = default;
#else
    bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_queue == rhs.m_queue;
    }

    bool operator!=( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_queue != rhs.m_queue;
    }

    bool operator<( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_queue < rhs.m_queue;
    }
#endif

    //=== VK_VERSION_1_0 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         submit( uint32_t                                 submitCount,
                                 const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
                                 VULKAN_HPP_NAMESPACE::Fence              fence,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
              VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         bindSparse( uint32_t                                     bindInfoCount,
                                     const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
                                     VULKAN_HPP_NAMESPACE::Fence                  fence,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
                  VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_swapchain ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_EXT_debug_utils ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_NV_device_diagnostic_checkpoints ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getCheckpointDataNV( uint32_t *                               pCheckpointDataCount,
                              VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>,
              typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
                         getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>,
              typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                         = CheckpointDataNVAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
                         getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_INTEL_performance_query ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL(
      VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
         setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_synchronization2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         submit2KHR( uint32_t                                     submitCount,
                                     const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits,
                                     VULKAN_HPP_NAMESPACE::Fence                  fence,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits,
                  VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getCheckpointData2NV( uint32_t *                                pCheckpointDataCount,
                               VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>,
              typename Dispatch                   = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
                         getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename CheckpointData2NVAllocator = std::allocator<CheckpointData2NV>,
              typename Dispatch                   = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                          = CheckpointData2NVAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
                         getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
    {
      return m_queue;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_queue != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_queue == VK_NULL_HANDLE;
    }

  private:
    VkQueue m_queue = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eQueue>
  {
    using type = VULKAN_HPP_NAMESPACE::Queue;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueue>
  {
    using Type = VULKAN_HPP_NAMESPACE::Queue;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue>
  {
    using Type = VULKAN_HPP_NAMESPACE::Queue;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Queue>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
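
  // Usage sketch (illustrative only, not part of the generated API; assumes enhanced mode with
  // exceptions enabled, the default "vk" namespace, and a device, queue family index, submitInfo
  // and fence created elsewhere): a Queue is retrieved from its Device rather than created, and
  // the enhanced overloads throw on failure instead of returning a vk::Result.
  //
  //   vk::Queue queue = device.getQueue( graphicsQueueFamilyIndex, 0 );
  //   queue.submit2KHR( submitInfo, inFlightFence );  // VK_KHR_synchronization2 submission path
  //   queue.waitIdle();                               // blocks until all submitted work completes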

  struct DeviceQueueInfo2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_            = {},
                                           uint32_t                                     queueFamilyIndex_ = {},
                                           uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , queueFamilyIndex( queueFamilyIndex_ )
      , queueIndex( queueIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
      return *this;
    }

    DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueIndex = queueIndex_;
      return *this;
    }

    operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceQueueInfo2 *>( this );
    }

    operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceQueueInfo2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceQueueInfo2 const & ) const = default;
#else
    bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueIndex == rhs.queueIndex );
    }

    bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType            = StructureType::eDeviceQueueInfo2;
    const void *                                 pNext            = {};
    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags            = {};
    uint32_t                                     queueFamilyIndex = {};
    uint32_t                                     queueIndex       = {};
  };
  static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
  {
    using Type = DeviceQueueInfo2;
  };
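
  // Usage sketch (illustrative only, not part of the generated API; assumes enhanced mode and a
  // queue that was created with vk::DeviceQueueCreateFlagBits::eProtected): DeviceQueueInfo2
  // exists so that creation flags can be part of the queue lookup, which plain Device::getQueue
  // cannot express.
  //
  //   vk::DeviceQueueInfo2 queueInfo( vk::DeviceQueueCreateFlagBits::eProtected, queueFamilyIndex, 0 );
  //   vk::Queue protectedQueue = device.getQueue2( queueInfo );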

  struct FenceGetFdInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence                           fence_ = {},
                         VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
                           VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : fence( fence_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : FenceGetFdInfoKHR( *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR &
                            operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>( &rhs );
      return *this;
    }

    FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    FenceGetFdInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFenceGetFdInfoKHR *>( this );
    }

    operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFenceGetFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FenceGetFdInfoKHR const & ) const = default;
#else
    bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) &&
             ( handleType == rhs.handleType );
    }

    bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType = StructureType::eFenceGetFdInfoKHR;
    const void *                                          pNext = {};
    VULKAN_HPP_NAMESPACE::Fence                           fence = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
  {
    using Type = FenceGetFdInfoKHR;
  };
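
  // Usage sketch (illustrative only, not part of the generated API; assumes enhanced mode,
  // VK_KHR_external_fence_fd enabled on the device, and a fence created with a matching export
  // handle type): exporting the fence payload as a POSIX file descriptor; ownership of the
  // returned fd passes to the caller.
  //
  //   vk::FenceGetFdInfoKHR getFdInfo( exportableFence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd );
  //   int fd = device.getFenceFdKHR( getFdInfo );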

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct FenceGetWin32HandleInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(
      VULKAN_HPP_NAMESPACE::Fence                           fence_ = {},
      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
        VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : fence( fence_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR
      FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : FenceGetWin32HandleInfoKHR( *reinterpret_cast<FenceGetWin32HandleInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR &
                            operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

    FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    FenceGetWin32HandleInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( this );
    }

    operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) &&
             ( handleType == rhs.handleType );
    }

    bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType = StructureType::eFenceGetWin32HandleInfoKHR;
    const void *                                          pNext = {};
    VULKAN_HPP_NAMESPACE::Fence                           fence = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FenceGetWin32HandleInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFenceGetWin32HandleInfoKHR>
  {
    using Type = FenceGetWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
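
  // Usage sketch (illustrative only, not part of the generated API; applies to Win32 builds with
  // VK_KHR_external_fence_win32 enabled): the Win32 export path mirrors the fd path above,
  // returning a HANDLE instead of a file descriptor.
  //
  //   vk::FenceGetWin32HandleInfoKHR getHandleInfo( exportableFence,
  //                                                 vk::ExternalFenceHandleTypeFlagBits::eOpaqueWin32 );
  //   HANDLE handle = device.getFenceWin32HandleKHR( getHandleInfo );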

  struct GeneratedCommandsMemoryRequirementsInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
      VULKAN_HPP_NAMESPACE::Pipeline          pipeline_          = {},
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {},
      uint32_t                                       maxSequencesCount_      = {} ) VULKAN_HPP_NOEXCEPT
      : pipelineBindPoint( pipelineBindPoint_ )
      , pipeline( pipeline_ )
      , indirectCommandsLayout( indirectCommandsLayout_ )
      , maxSequencesCount( maxSequencesCount_ )
    {}

    VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(
      GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : GeneratedCommandsMemoryRequirementsInfoNV(
          *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV &
                            operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GeneratedCommandsMemoryRequirementsInfoNV &
      operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs );
      return *this;
    }

    GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    GeneratedCommandsMemoryRequirementsInfoNV &
      setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineBindPoint = pipelineBindPoint_;
      return *this;
    }

    GeneratedCommandsMemoryRequirementsInfoNV &
      setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }

    GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout(
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      indirectCommandsLayout = indirectCommandsLayout_;
      return *this;
    }

    GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSequencesCount = maxSequencesCount_;
      return *this;
    }

    operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( this );
    }

    operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default;
#else
    bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) &&
             ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) &&
             ( maxSequencesCount == rhs.maxSequencesCount );
    }

    bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType     sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
    const void *                            pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
    VULKAN_HPP_NAMESPACE::Pipeline          pipeline          = {};
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
    uint32_t                                       maxSequencesCount      = {};
  };
  static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) ==
                   sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<GeneratedCommandsMemoryRequirementsInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
  {
    using Type = GeneratedCommandsMemoryRequirementsInfoNV;
  };
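
  // Usage sketch (illustrative only, not part of the generated API; assumes enhanced mode,
  // VK_NV_device_generated_commands, and a pipeline plus IndirectCommandsLayoutNV created
  // elsewhere): querying how much preprocess memory a generated-commands stream will need before
  // allocating its buffer.
  //
  //   vk::GeneratedCommandsMemoryRequirementsInfoNV info( vk::PipelineBindPoint::eGraphics,
  //                                                       pipeline, indirectCommandsLayout, maxSequencesCount );
  //   vk::MemoryRequirements2 reqs = device.getGeneratedCommandsMemoryRequirementsNV( info );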

  struct ImageDrmFormatModifierPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eImageDrmFormatModifierPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT
      : drmFormatModifier( drmFormatModifier_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierPropertiesEXT &
                            operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageDrmFormatModifierPropertiesEXT &
      operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT *>( this );
    }

    operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default;
#else
    bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier );
    }

    bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::eImageDrmFormatModifierPropertiesEXT;
    void *                              pNext             = {};
    uint64_t                            drmFormatModifier = {};
  };
  static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageDrmFormatModifierPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
  {
    using Type = ImageDrmFormatModifierPropertiesEXT;
  };
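
  // Usage sketch (illustrative only, not part of the generated API; assumes enhanced mode,
  // VK_EXT_image_drm_format_modifier, and an image created with
  // vk::ImageTiling::eDrmFormatModifierEXT): this is an output-only structure, filled by the
  // query below rather than set up by the application.
  //
  //   vk::ImageDrmFormatModifierPropertiesEXT props = device.getImageDrmFormatModifierPropertiesEXT( image );
  //   uint64_t modifier = props.drmFormatModifier;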

  struct ImageMemoryRequirementsInfo2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageMemoryRequirementsInfo2( *reinterpret_cast<ImageMemoryRequirementsInfo2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 &
                            operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>( &rhs );
      return *this;
    }

    ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( this );
    }

    operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageMemoryRequirementsInfo2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default;
#else
    bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
    }

    bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::Image         image = {};
  };
  static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageMemoryRequirementsInfo2>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
  {
    using Type = ImageMemoryRequirementsInfo2;
  };
  using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
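
  // Usage sketch (illustrative only, not part of the generated API; assumes enhanced mode and an
  // image created elsewhere): the ...Info2 wrapper makes the memory-requirements query extensible
  // through pNext, e.g. for chaining a dedicated-allocation query via the StructureChain overload.
  //
  //   vk::MemoryRequirements2 reqs =
  //     device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2( image ) );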

  struct SparseImageFormatProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags       aspectMask_       = {},
                                   VULKAN_HPP_NAMESPACE::Extent3D               imageGranularity_ = {},
                                   VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : aspectMask( aspectMask_ )
      , imageGranularity( imageGranularity_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageFormatProperties( *reinterpret_cast<SparseImageFormatProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SparseImageFormatProperties &
                            operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>( &rhs );
      return *this;
    }

    operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageFormatProperties *>( this );
    }

    operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageFormatProperties const & ) const = default;
#else
    bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags );
    }

    bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageAspectFlags       aspectMask       = {};
    VULKAN_HPP_NAMESPACE::Extent3D               imageGranularity = {};
    VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags            = {};
  };
  static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseImageFormatProperties>::value,
                 "struct wrapper is not a standard layout!" );

  struct SparseImageMemoryRequirements
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_     = {},
                                     uint32_t                                          imageMipTailFirstLod_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize                  imageMipTailSize_     = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize                  imageMipTailOffset_   = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT
      : formatProperties( formatProperties_ )
      , imageMipTailFirstLod( imageMipTailFirstLod_ )
      , imageMipTailSize( imageMipTailSize_ )
      , imageMipTailOffset( imageMipTailOffset_ )
      , imageMipTailStride( imageMipTailStride_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageMemoryRequirements( *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryRequirements &
                            operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>( &rhs );
      return *this;
    }

    operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryRequirements *>( this );
    }

    operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryRequirements *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageMemoryRequirements const & ) const = default;
#else
    bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) &&
             ( imageMipTailSize == rhs.imageMipTailSize ) && ( imageMipTailOffset == rhs.imageMipTailOffset ) &&
             ( imageMipTailStride == rhs.imageMipTailStride );
    }

    bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties     = {};
    uint32_t                                          imageMipTailFirstLod = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  imageMipTailSize     = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  imageMipTailOffset   = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  imageMipTailStride   = {};
  };
  static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value,
                 "struct wrapper is not a standard layout!" );

  struct ImageSparseMemoryRequirementsInfo2
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eImageSparseMemoryRequirementsInfo2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 &
                            operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageSparseMemoryRequirementsInfo2 &
      operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
      return *this;
    }

    ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( this );
    }

    operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default;
#else
    bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image );
    }

    bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::Image         image = {};
  };
  static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageSparseMemoryRequirementsInfo2>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
  {
    using Type = ImageSparseMemoryRequirementsInfo2;
  };
  using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;

  struct SparseImageMemoryRequirements2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(
      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryRequirements( memoryRequirements_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageMemoryRequirements2( *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryRequirements2 &
                            operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>( &rhs );
      return *this;
    }

    operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageMemoryRequirements2 *>( this );
    }

    operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageMemoryRequirements2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default;
#else
    bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
    }

    bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType = StructureType::eSparseImageMemoryRequirements2;
    void *                                              pNext = {};
    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
  };
  static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseImageMemoryRequirements2>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
  {
    using Type = SparseImageMemoryRequirements2;
  };
  using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
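
  // Usage sketch (illustrative only, not part of the generated API; assumes enhanced mode and an
  // image created with sparse binding/residency flags): the query returns one
  // SparseImageMemoryRequirements2 per aspect, each wrapping the SparseImageFormatProperties and
  // SparseImageMemoryRequirements structures defined above.
  //
  //   std::vector<vk::SparseImageMemoryRequirements2> sparseReqs =
  //     device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( sparseImage ) );
  //   for ( auto const & req : sparseReqs )
  //   {
  //     vk::Extent3D granularity = req.memoryRequirements.formatProperties.imageGranularity;
  //   }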

  struct SubresourceLayout
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_     = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize size_       = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_   = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {},
                                            VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT
      : offset( offset_ )
      , size( size_ )
      , rowPitch( rowPitch_ )
      , arrayPitch( arrayPitch_ )
      , depthPitch( depthPitch_ )
    {}

    VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubresourceLayout &
                            operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>( &rhs );
      return *this;
    }

    operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubresourceLayout *>( this );
    }

    operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubresourceLayout *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubresourceLayout const & ) const = default;
#else
    bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) &&
             ( arrayPitch == rhs.arrayPitch ) && ( depthPitch == rhs.depthPitch );
    }

    bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize offset     = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size       = {};
    VULKAN_HPP_NAMESPACE::DeviceSize rowPitch   = {};
    VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
    VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
  };
  static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
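
  // Usage sketch (assumes an existing vk::Device `device` and a linearly tiled vk::Image `image`):
  //   vk::ImageSubresource subresource( vk::ImageAspectFlagBits::eColor, 0 /*mipLevel*/, 0 /*arrayLayer*/ );
  //   vk::SubresourceLayout layout = device.getImageSubresourceLayout( image, subresource );
  //   // layout.offset and layout.rowPitch describe where the subresource starts and how its rows are strided.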

  struct ImageViewAddressPropertiesNVX
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {},
                                     VULKAN_HPP_NAMESPACE::DeviceSize    size_          = {} ) VULKAN_HPP_NOEXCEPT
      : deviceAddress( deviceAddress_ )
      , size( size_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewAddressPropertiesNVX( *reinterpret_cast<ImageViewAddressPropertiesNVX const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageViewAddressPropertiesNVX &
                            operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const *>( &rhs );
      return *this;
    }

    operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewAddressPropertiesNVX *>( this );
    }

    operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewAddressPropertiesNVX *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default;
#else
    bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) &&
             ( size == rhs.size );
    }

    bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eImageViewAddressPropertiesNVX;
    void *                              pNext         = {};
    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    size          = {};
  };
  static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageViewAddressPropertiesNVX>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
  {
    using Type = ImageViewAddressPropertiesNVX;
  };

  struct ImageViewHandleInfoNVX
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(
      VULKAN_HPP_NAMESPACE::ImageView      imageView_      = {},
      VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
      VULKAN_HPP_NAMESPACE::Sampler        sampler_        = {} ) VULKAN_HPP_NOEXCEPT
      : imageView( imageView_ )
      , descriptorType( descriptorType_ )
      , sampler( sampler_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewHandleInfoNVX( *reinterpret_cast<ImageViewHandleInfoNVX const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX &
                            operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>( &rhs );
      return *this;
    }

    ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView = imageView_;
      return *this;
    }

    ImageViewHandleInfoNVX &
      setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorType = descriptorType_;
      return *this;
    }

    ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
    {
      sampler = sampler_;
      return *this;
    }

    operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewHandleInfoNVX *>( this );
    }

    operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewHandleInfoNVX *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewHandleInfoNVX const & ) const = default;
#else
    bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) &&
             ( descriptorType == rhs.descriptorType ) && ( sampler == rhs.sampler );
    }

    bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType  sType          = StructureType::eImageViewHandleInfoNVX;
    const void *                         pNext          = {};
    VULKAN_HPP_NAMESPACE::ImageView      imageView      = {};
    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
    VULKAN_HPP_NAMESPACE::Sampler        sampler        = {};
  };
  static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
  {
    using Type = ImageViewHandleInfoNVX;
  };
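
  // Usage sketch (assumes an existing vk::Device `device`, vk::ImageView `imageView` and vk::Sampler `sampler`,
  // with VK_NVX_image_view_handle enabled):
  //   vk::ImageViewHandleInfoNVX info;
  //   info.setImageView( imageView ).setDescriptorType( vk::DescriptorType::eCombinedImageSampler ).setSampler( sampler );
  //   uint32_t handle = device.getImageViewHandleNVX( info );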

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  struct MemoryGetAndroidHardwareBufferInfoANDROID
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT
      : memory( memory_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(
      MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs )
      VULKAN_HPP_NOEXCEPT
      : MemoryGetAndroidHardwareBufferInfoANDROID(
          *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID &
                            operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetAndroidHardwareBufferInfoANDROID &
      operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
      return *this;
    }

    MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryGetAndroidHardwareBufferInfoANDROID &
      setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
    }

    operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default;
#  else
    bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory );
    }

    bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
    const void *                        pNext  = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory  memory = {};
  };
  static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) ==
                   sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryGetAndroidHardwareBufferInfoANDROID>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
  {
    using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
  };
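
  // Usage sketch (assumes an existing vk::Device `device` and vk::DeviceMemory `memory` that was allocated as
  // exportable to an AHardwareBuffer; enhanced mode):
  //   vk::MemoryGetAndroidHardwareBufferInfoANDROID info( memory );
  //   struct AHardwareBuffer * buffer = device.getMemoryAndroidHardwareBufferANDROID( info );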
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

  struct MemoryGetFdInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory                     memory_ = {},
                          VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
                            VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : memory( memory_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR &
                            operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>( &rhs );
      return *this;
    }

    MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    MemoryGetFdInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetFdInfoKHR *>( this );
    }

    operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetFdInfoKHR const & ) const = default;
#else
    bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
             ( handleType == rhs.handleType );
    }

    bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType  = StructureType::eMemoryGetFdInfoKHR;
    const void *                                           pNext  = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory                     memory = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
  {
    using Type = MemoryGetFdInfoKHR;
  };
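
  // Usage sketch (assumes an existing vk::Device `device` and vk::DeviceMemory `memory` allocated with an
  // exportable opaque-fd handle type; the returned file descriptor is owned by the caller):
  //   vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   int fd = device.getMemoryFdKHR( getFdInfo );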

  struct MemoryFdPropertiesKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryFdPropertiesKHR &
                            operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryFdPropertiesKHR *>( this );
    }

    operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryFdPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryFdPropertiesKHR const & ) const = default;
#else
    bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
    }

    bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eMemoryFdPropertiesKHR;
    void *                              pNext          = {};
    uint32_t                            memoryTypeBits = {};
  };
  static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
  {
    using Type = MemoryFdPropertiesKHR;
  };
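
  // Usage sketch (assumes an existing vk::Device `device` and a file descriptor `fd` received from an external
  // exporter; opaque-fd handles cannot be queried this way):
  //   vk::MemoryFdPropertiesKHR fdProps =
  //     device.getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT, fd );
  //   // fdProps.memoryTypeBits restricts which memory types the fd may be imported into.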

  struct MemoryHostPointerPropertiesEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryHostPointerPropertiesEXT &
                            operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT *>( this );
    }

    operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default;
#else
    bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
    }

    bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eMemoryHostPointerPropertiesEXT;
    void *                              pNext          = {};
    uint32_t                            memoryTypeBits = {};
  };
  static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
  {
    using Type = MemoryHostPointerPropertiesEXT;
  };
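
  // Usage sketch (assumes an existing vk::Device `device` and a suitably aligned host allocation `hostPtr`,
  // with VK_EXT_external_memory_host enabled):
  //   vk::MemoryHostPointerPropertiesEXT hostProps = device.getMemoryHostPointerPropertiesEXT(
  //     vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );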

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct MemoryGetWin32HandleInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(
      VULKAN_HPP_NAMESPACE::DeviceMemory                     memory_ = {},
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : memory( memory_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryGetWin32HandleInfoKHR( *reinterpret_cast<MemoryGetWin32HandleInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR &
                            operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

    MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    MemoryGetWin32HandleInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( this );
    }

    operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
             ( handleType == rhs.handleType );
    }

    bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType  = StructureType::eMemoryGetWin32HandleInfoKHR;
    const void *                                           pNext  = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory                     memory = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryGetWin32HandleInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryGetWin32HandleInfoKHR>
  {
    using Type = MemoryGetWin32HandleInfoKHR;
  };
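
  // Usage sketch (assumes an existing vk::Device `device` and vk::DeviceMemory `memory` allocated with an
  // exportable Win32 handle type):
  //   vk::MemoryGetWin32HandleInfoKHR info( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 );
  //   HANDLE handle = device.getMemoryWin32HandleKHR( info );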
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct MemoryWin32HandlePropertiesKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryWin32HandlePropertiesKHR( *reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryWin32HandlePropertiesKHR &
                            operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR *>( this );
    }

    operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default;
#  else
    bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
    }

    bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eMemoryWin32HandlePropertiesKHR;
    void *                              pNext          = {};
    uint32_t                            memoryTypeBits = {};
  };
  static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryWin32HandlePropertiesKHR>
  {
    using Type = MemoryWin32HandlePropertiesKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  struct MemoryGetZirconHandleInfoFUCHSIA
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA(
      VULKAN_HPP_NAMESPACE::DeviceMemory                     memory_ = {},
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : memory( memory_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast<MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA &
                            operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

    MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
    {
      memory = memory_;
      return *this;
    }

    MemoryGetZirconHandleInfoFUCHSIA &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryGetZirconHandleInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default;
#  else
    bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) &&
             ( handleType == rhs.handleType );
    }

    bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType  = StructureType::eMemoryGetZirconHandleInfoFUCHSIA;
    const void *                                           pNext  = {};
    VULKAN_HPP_NAMESPACE::DeviceMemory                     memory = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( MemoryGetZirconHandleInfoFUCHSIA ) == sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryGetZirconHandleInfoFUCHSIA>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryGetZirconHandleInfoFUCHSIA>
  {
    using Type = MemoryGetZirconHandleInfoFUCHSIA;
  };
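
  // Usage sketch (assumes an existing vk::Device `device` and vk::DeviceMemory `memory` allocated with an
  // exportable Zircon VMO handle type):
  //   vk::MemoryGetZirconHandleInfoFUCHSIA info( memory, vk::ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA );
  //   zx_handle_t vmo = device.getMemoryZirconHandleFUCHSIA( info );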
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
  struct MemoryZirconHandlePropertiesFUCHSIA
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eMemoryZirconHandlePropertiesFUCHSIA;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryTypeBits( memoryTypeBits_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast<MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryZirconHandlePropertiesFUCHSIA &
                            operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryZirconHandlePropertiesFUCHSIA &
      operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs );
      return *this;
    }

    operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
    }

    operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default;
#  else
    bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
    }

    bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
    void *                              pNext          = {};
    uint32_t                            memoryTypeBits = {};
  };
  static_assert( sizeof( MemoryZirconHandlePropertiesFUCHSIA ) == sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryZirconHandlePropertiesFUCHSIA>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryZirconHandlePropertiesFUCHSIA>
  {
    using Type = MemoryZirconHandlePropertiesFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  struct PastPresentationTimingGOOGLE
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_           = {},
                                                       uint64_t desiredPresentTime_  = {},
                                                       uint64_t actualPresentTime_   = {},
                                                       uint64_t earliestPresentTime_ = {},
                                                       uint64_t presentMargin_       = {} ) VULKAN_HPP_NOEXCEPT
      : presentID( presentID_ )
      , desiredPresentTime( desiredPresentTime_ )
      , actualPresentTime( actualPresentTime_ )
      , earliestPresentTime( earliestPresentTime_ )
      , presentMargin( presentMargin_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PastPresentationTimingGOOGLE( *reinterpret_cast<PastPresentationTimingGOOGLE const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingGOOGLE &
                            operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>( &rhs );
      return *this;
    }

    operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPastPresentationTimingGOOGLE *>( this );
    }

    operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPastPresentationTimingGOOGLE *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default;
#else
    bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) &&
             ( actualPresentTime == rhs.actualPresentTime ) && ( earliestPresentTime == rhs.earliestPresentTime ) &&
             ( presentMargin == rhs.presentMargin );
    }

    bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t presentID           = {};
    uint64_t desiredPresentTime  = {};
    uint64_t actualPresentTime   = {};
    uint64_t earliestPresentTime = {};
    uint64_t presentMargin       = {};
  };
  static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PastPresentationTimingGOOGLE>::value,
                 "struct wrapper is not a standard layout!" );

  union PerformanceValueDataINTEL
  {
    PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
    }

    PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {}

    PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {}

    PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {}

    PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {}

    PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
    {
      value32 = value32_;
      return *this;
    }

    PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
    {
      value64 = value64_;
      return *this;
    }

    PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
    {
      valueFloat = valueFloat_;
      return *this;
    }

    PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
    {
      valueBool = valueBool_;
      return *this;
    }

    PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT
    {
      valueString = valueString_;
      return *this;
    }

    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL &
      operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
      return *this;
    }

    operator VkPerformanceValueDataINTEL const &() const
    {
      return *reinterpret_cast<const VkPerformanceValueDataINTEL *>( this );
    }

    operator VkPerformanceValueDataINTEL &()
    {
      return *reinterpret_cast<VkPerformanceValueDataINTEL *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    uint32_t                     value32;
    uint64_t                     value64;
    float                        valueFloat;
    VULKAN_HPP_NAMESPACE::Bool32 valueBool;
    const char *                 valueString;
#else
    uint32_t     value32;
    uint64_t     value64;
    float        valueFloat;
    VkBool32     valueBool;
    const char * valueString;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

  struct PerformanceValueINTEL
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    PerformanceValueINTEL(
      VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
      VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
      : type( type_ )
      , data( data_ )
    {}

    PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceValueINTEL( *reinterpret_cast<PerformanceValueINTEL const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>( &rhs );
      return *this;
    }

    PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }

    operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceValueINTEL *>( this );
    }

    operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceValueINTEL *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
  };
  static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
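
  // Usage sketch (assumes an existing vk::Device `device` with VK_INTEL_performance_query enabled):
  //   vk::PerformanceValueINTEL value =
  //     device.getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL::eHwCountersSupported );
  //   // value.type selects which member of value.data (value32, value64, valueFloat, valueBool, valueString) is valid.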

  struct PipelineExecutableInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_        = {},
                                                    uint32_t                       executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : pipeline( pipeline_ )
      , executableIndex( executableIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineExecutableInfoKHR( *reinterpret_cast<PipelineExecutableInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR &
                            operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>( &rhs );
      return *this;
    }

    PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }

    PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      executableIndex = executableIndex_;
      return *this;
    }

    operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineExecutableInfoKHR *>( this );
    }

    operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineExecutableInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineExecutableInfoKHR const & ) const = default;
#else
    bool         operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) &&
             ( executableIndex == rhs.executableIndex );
    }

    bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::ePipelineExecutableInfoKHR;
    const void *                        pNext           = {};
    VULKAN_HPP_NAMESPACE::Pipeline      pipeline        = {};
    uint32_t                            executableIndex = {};
  };
  static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineExecutableInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
  {
    using Type = PipelineExecutableInfoKHR;
  };
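
  // Usage sketch (assumes an existing vk::Device `device` and vk::Pipeline `pipeline` created while
  // VK_KHR_pipeline_executable_properties was enabled; enhanced mode):
  //   vk::PipelineExecutableInfoKHR execInfo( pipeline, 0 /*executableIndex*/ );
  //   auto statistics      = device.getPipelineExecutableStatisticsKHR( execInfo );
  //   auto representations = device.getPipelineExecutableInternalRepresentationsKHR( execInfo );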

  struct PipelineExecutableInternalRepresentationKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineExecutableInternalRepresentationKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_        = {},
                                                   std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
                                                   VULKAN_HPP_NAMESPACE::Bool32                      isText_      = {},
                                                   size_t                                            dataSize_    = {},
                                                   void * pData_ = {} ) VULKAN_HPP_NOEXCEPT
      : name( name_ )
      , description( description_ )
      , isText( isText_ )
      , dataSize( dataSize_ )
      , pData( pData_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(
      PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineExecutableInternalRepresentationKHR(
          *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    PipelineExecutableInternalRepresentationKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const &        name_,
                                                 std::array<char, VK_MAX_DESCRIPTION_SIZE> const &        description_,
                                                 VULKAN_HPP_NAMESPACE::Bool32                             isText_,
                                                 VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
      : name( name_ )
      , description( description_ )
      , isText( isText_ )
      , dataSize( data_.size() * sizeof( T ) )
      , pData( data_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR &
                            operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutableInternalRepresentationKHR &
      operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
      return *this;
    }

    operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR *>( this );
    }

    operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default;
#else
    bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) &&
             ( description == rhs.description ) && ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) &&
             ( pData == rhs.pData );
    }

    bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name        = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
    VULKAN_HPP_NAMESPACE::Bool32                                        isText      = {};
    size_t                                                              dataSize    = {};
    void *                                                              pData       = {};
  };
  static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) ==
                   sizeof( VkPipelineExecutableInternalRepresentationKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
  {
    using Type = PipelineExecutableInternalRepresentationKHR;
  };

  struct PipelineInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
      : pipeline( pipeline_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
      return *this;
    }

    PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      pipeline = pipeline_;
      return *this;
    }

    operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineInfoKHR *>( this );
    }

    operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineInfoKHR const & ) const = default;
#else
    bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline );
    }

    bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::ePipelineInfoKHR;
    const void *                        pNext    = {};
    VULKAN_HPP_NAMESPACE::Pipeline      pipeline = {};
  };
  static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineInfoKHR>
  {
    using Type = PipelineInfoKHR;
  };
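
  // Usage sketch (assumes an existing vk::Device `device` and vk::Pipeline `pipeline`; enhanced mode):
  //   std::vector<vk::PipelineExecutablePropertiesKHR> executables =
  //     device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR( pipeline ) );
  //   // executables[i].name and description identify each executable; the index i feeds
  //   // vk::PipelineExecutableInfoKHR::executableIndex for the per-executable queries above.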

  struct PipelineExecutablePropertiesKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags            stages_      = {},
                                       std::array<char, VK_MAX_DESCRIPTION_SIZE> const & name_        = {},
                                       std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
                                       uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : stages( stages_ )
      , name( name_ )
      , description( description_ )
      , subgroupSize( subgroupSize_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineExecutablePropertiesKHR( *reinterpret_cast<PipelineExecutablePropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR &
                            operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR *>( this );
    }

    operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default;
#else
    bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) &&
             ( description == rhs.description ) && ( subgroupSize == rhs.subgroupSize );
    }

    bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType  = StructureType::ePipelineExecutablePropertiesKHR;
    void *                                 pNext  = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name         = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description  = {};
    uint32_t                                                            subgroupSize = {};
  };
  static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineExecutablePropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
  {
    using Type = PipelineExecutablePropertiesKHR;
  };

  union PipelineExecutableStatisticValueKHR
  {
    PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
    }

    PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) {}

    PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {}

    PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {}

    PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {}

    PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
    {
      b32 = b32_;
      return *this;
    }

    PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
    {
      i64 = i64_;
      return *this;
    }

    PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
    {
      u64 = u64_;
      return *this;
    }

    PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
    {
      f64 = f64_;
      return *this;
    }

    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR &
      operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
      return *this;
    }

    operator VkPipelineExecutableStatisticValueKHR const &() const
    {
      return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR *>( this );
    }

    operator VkPipelineExecutableStatisticValueKHR &()
    {
      return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR *>( this );
    }

#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
    VULKAN_HPP_NAMESPACE::Bool32 b32;
    int64_t                      i64;
    uint64_t                     u64;
    double                       f64;
#else
    VkBool32 b32;
    int64_t  i64;
    uint64_t u64;
    double   f64;
#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  };

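  // One statistic returned by vkGetPipelineExecutableStatisticsKHR: a human-readable name and
  // description plus a typed value whose valid union member is selected by `format`.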
  struct PipelineExecutableStatisticKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    PipelineExecutableStatisticKHR( std::array<char, VK_MAX_DESCRIPTION_SIZE> const &          name_        = {},
                                    std::array<char, VK_MAX_DESCRIPTION_SIZE> const &          description_ = {},
                                    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ =
                                      VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32,
                                    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} )
      VULKAN_HPP_NOEXCEPT
      : name( name_ )
      , description( description_ )
      , format( format_ )
      , value( value_ )
    {}

    PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineExecutableStatisticKHR( *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    PipelineExecutableStatisticKHR &
      operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>( &rhs );
      return *this;
    }

    operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineExecutableStatisticKHR *>( this );
    }

    operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineExecutableStatisticKHR *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name        = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR          format =
      VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
  };
  static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
  {
    using Type = PipelineExecutableStatisticKHR;
  };

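  // Output of vkGetRefreshCycleDurationGOOGLE (VK_GOOGLE_display_timing): the display's refresh
  // period in nanoseconds.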
  struct RefreshCycleDurationGOOGLE
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT
      : refreshDuration( refreshDuration_ )
    {}

    VULKAN_HPP_CONSTEXPR
      RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
      : RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RefreshCycleDurationGOOGLE &
                            operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
      return *this;
    }

    operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE *>( this );
    }

    operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default;
#else
    bool     operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( refreshDuration == rhs.refreshDuration );
    }

    bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint64_t refreshDuration = {};
  };
  static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value,
                 "struct wrapper is not a standard layout!" );

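  // Parameters for vkGetSemaphoreFdKHR (VK_KHR_external_semaphore_fd), which exports a semaphore
  // payload as a POSIX file descriptor. Illustrative sketch only, assuming the default `vk` namespace,
  // valid `device` and `semaphore` handles, the extension enabled, and exceptions enabled:
  //   int fd = device.getSemaphoreFdKHR(
  //     vk::SemaphoreGetFdInfoKHR{}
  //       .setSemaphore( semaphore )
  //       .setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) );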
  struct SemaphoreGetFdInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
                                                VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
                                                  VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
      VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR &
                            operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
      return *this;
    }

    SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    SemaphoreGetFdInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( this );
    }

    operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreGetFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default;
#else
    bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
             ( handleType == rhs.handleType );
    }

    bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                       sType     = StructureType::eSemaphoreGetFdInfoKHR;
    const void *                                              pNext     = {};
    VULKAN_HPP_NAMESPACE::Semaphore                           semaphore = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
  {
    using Type = SemaphoreGetFdInfoKHR;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
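  // Win32 counterpart of SemaphoreGetFdInfoKHR: parameters for vkGetSemaphoreWin32HandleKHR
  // (VK_KHR_external_semaphore_win32).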
  struct SemaphoreGetWin32HandleInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(
      VULKAN_HPP_NAMESPACE::Semaphore                           semaphore_ = {},
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
        VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR &
                            operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

    SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    SemaphoreGetWin32HandleInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( this );
    }

    operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
             ( handleType == rhs.handleType );
    }

    bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                       sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
    const void *                                              pNext = {};
    VULKAN_HPP_NAMESPACE::Semaphore                           semaphore = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SemaphoreGetWin32HandleInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
  {
    using Type = SemaphoreGetWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
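  // Fuchsia counterpart of SemaphoreGetFdInfoKHR: parameters for vkGetSemaphoreZirconHandleFUCHSIA
  // (VK_FUCHSIA_external_semaphore), which exports a semaphore payload as a Zircon handle.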
  struct SemaphoreGetZirconHandleInfoFUCHSIA
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA(
      VULKAN_HPP_NAMESPACE::Semaphore                           semaphore_ = {},
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
        VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast<SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA &
                            operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreGetZirconHandleInfoFUCHSIA &
      operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

    SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    SemaphoreGetZirconHandleInfoFUCHSIA &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreGetZirconHandleInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default;
#  else
    bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
             ( handleType == rhs.handleType );
    }

    bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA;
    const void *                        pNext     = {};
    VULKAN_HPP_NAMESPACE::Semaphore     semaphore = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( SemaphoreGetZirconHandleInfoFUCHSIA ) == sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SemaphoreGetZirconHandleInfoFUCHSIA>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA>
  {
    using Type = SemaphoreGetZirconHandleInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
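  // Provisional video API (VK_KHR_video_queue): per-bind-index memory requirements entry used with
  // vkGetVideoSessionMemoryRequirementsKHR; pMemoryRequirements points at MemoryRequirements2 storage.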
  struct VideoGetMemoryPropertiesKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoGetMemoryPropertiesKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoGetMemoryPropertiesKHR(
      uint32_t                                    memoryBindIndex_     = {},
      VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryBindIndex( memoryBindIndex_ )
      , pMemoryRequirements( pMemoryRequirements_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoGetMemoryPropertiesKHR( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoGetMemoryPropertiesKHR( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoGetMemoryPropertiesKHR( *reinterpret_cast<VideoGetMemoryPropertiesKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoGetMemoryPropertiesKHR &
                            operator=( VideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoGetMemoryPropertiesKHR & operator=( VkVideoGetMemoryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR const *>( &rhs );
      return *this;
    }

    VideoGetMemoryPropertiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoGetMemoryPropertiesKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryBindIndex = memoryBindIndex_;
      return *this;
    }

    VideoGetMemoryPropertiesKHR &
      setPMemoryRequirements( VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements_ ) VULKAN_HPP_NOEXCEPT
    {
      pMemoryRequirements = pMemoryRequirements_;
      return *this;
    }

    operator VkVideoGetMemoryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoGetMemoryPropertiesKHR *>( this );
    }

    operator VkVideoGetMemoryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoGetMemoryPropertiesKHR const & ) const = default;
#  else
    bool operator==( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) &&
             ( pMemoryRequirements == rhs.pMemoryRequirements );
    }

    bool operator!=( VideoGetMemoryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType               = StructureType::eVideoGetMemoryPropertiesKHR;
    const void *                                pNext               = {};
    uint32_t                                    memoryBindIndex     = {};
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements = {};
  };
  static_assert( sizeof( VideoGetMemoryPropertiesKHR ) == sizeof( VkVideoGetMemoryPropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoGetMemoryPropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoGetMemoryPropertiesKHR>
  {
    using Type = VideoGetMemoryPropertiesKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

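  // Parameters for vkImportFenceFdKHR (VK_KHR_external_fence_fd): replaces the payload of `fence`
  // with one imported from a POSIX file descriptor.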
  struct ImportFenceFdInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence                           fence_ = {},
                                               VULKAN_HPP_NAMESPACE::FenceImportFlags                flags_ = {},
                                               VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
                                                 VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
                                               int fd_ = {} ) VULKAN_HPP_NOEXCEPT
      : fence( fence_ )
      , flags( flags_ )
      , handleType( handleType_ )
      , fd( fd_ )
    {}

    VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportFenceFdInfoKHR( *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR &
                            operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>( &rhs );
      return *this;
    }

    ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ImportFenceFdInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
    {
      fd = fd_;
      return *this;
    }

    operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportFenceFdInfoKHR *>( this );
    }

    operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportFenceFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportFenceFdInfoKHR const & ) const = default;
#else
    bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) &&
             ( handleType == rhs.handleType ) && ( fd == rhs.fd );
    }

    bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType = StructureType::eImportFenceFdInfoKHR;
    const void *                                          pNext = {};
    VULKAN_HPP_NAMESPACE::Fence                           fence = {};
    VULKAN_HPP_NAMESPACE::FenceImportFlags                flags = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
    int fd = {};
  };
  static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
  {
    using Type = ImportFenceFdInfoKHR;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
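  // Win32 counterpart of ImportFenceFdInfoKHR: parameters for vkImportFenceWin32HandleKHR
  // (VK_KHR_external_fence_win32); the payload is identified either by `handle` or, for named
  // NT handles, by `name`.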
  struct ImportFenceWin32HandleInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence                           fence_ = {},
                                     VULKAN_HPP_NAMESPACE::FenceImportFlags                flags_ = {},
                                     VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
                                       VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd,
                                     HANDLE  handle_ = {},
                                     LPCWSTR name_   = {} ) VULKAN_HPP_NOEXCEPT
      : fence( fence_ )
      , flags( flags_ )
      , handleType( handleType_ )
      , handle( handle_ )
      , name( name_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportFenceWin32HandleInfoKHR( *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR &
                            operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

    ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
    {
      fence = fence_;
      return *this;
    }

    ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ImportFenceWin32HandleInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }

    ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }

    operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( this );
    }

    operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) &&
             ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name );
    }

    bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType = StructureType::eImportFenceWin32HandleInfoKHR;
    const void *                                          pNext = {};
    VULKAN_HPP_NAMESPACE::Fence                           fence = {};
    VULKAN_HPP_NAMESPACE::FenceImportFlags                flags = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
    HANDLE  handle = {};
    LPCWSTR name   = {};
  };
  static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportFenceWin32HandleInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
  {
    using Type = ImportFenceWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

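  // Parameters for vkImportSemaphoreFdKHR (VK_KHR_external_semaphore_fd): replaces the payload of
  // `semaphore` with one imported from a POSIX file descriptor; a successfully imported fd is owned
  // by the implementation and must not be used by the application afterwards. Illustrative sketch
  // only, assuming the default `vk` namespace and valid `device`, `semaphore`, and `fd`:
  //   device.importSemaphoreFdKHR(
  //     vk::ImportSemaphoreFdInfoKHR{}
  //       .setSemaphore( semaphore )
  //       .setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd )
  //       .setFd( fd ) );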
  struct ImportSemaphoreFdInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore                           semaphore_ = {},
                                VULKAN_HPP_NAMESPACE::SemaphoreImportFlags                flags_     = {},
                                VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
                                  VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                int fd_ = {} ) VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ )
      , flags( flags_ )
      , handleType( handleType_ )
      , fd( fd_ )
    {}

    VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportSemaphoreFdInfoKHR( *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR &
                            operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>( &rhs );
      return *this;
    }

    ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ImportSemaphoreFdInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
    {
      fd = fd_;
      return *this;
    }

    operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( this );
    }

    operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportSemaphoreFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default;
#else
    bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
             ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
    }

    bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                       sType     = StructureType::eImportSemaphoreFdInfoKHR;
    const void *                                              pNext     = {};
    VULKAN_HPP_NAMESPACE::Semaphore                           semaphore = {};
    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags                flags     = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
    int fd = {};
  };
  static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
  {
    using Type = ImportSemaphoreFdInfoKHR;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
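  // Win32 counterpart of ImportSemaphoreFdInfoKHR: parameters for vkImportSemaphoreWin32HandleKHR
  // (VK_KHR_external_semaphore_win32).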
  struct ImportSemaphoreWin32HandleInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eImportSemaphoreWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore                           semaphore_ = {},
                                         VULKAN_HPP_NAMESPACE::SemaphoreImportFlags                flags_     = {},
                                         VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
                                           VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                         HANDLE  handle_ = {},
                                         LPCWSTR name_   = {} ) VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ )
      , flags( flags_ )
      , handleType( handleType_ )
      , handle( handle_ )
      , name( name_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ImportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR &
                            operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

    ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    ImportSemaphoreWin32HandleInfoKHR &
      setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ImportSemaphoreWin32HandleInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }

    ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }

    operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( this );
    }

    operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
             ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) &&
             ( name == rhs.name );
    }

    bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                       sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
    const void *                                              pNext = {};
    VULKAN_HPP_NAMESPACE::Semaphore                           semaphore = {};
    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags                flags     = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
    HANDLE  handle = {};
    LPCWSTR name   = {};
  };
  static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportSemaphoreWin32HandleInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportSemaphoreWin32HandleInfoKHR>
  {
    using Type = ImportSemaphoreWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
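  // Fuchsia counterpart of ImportSemaphoreFdInfoKHR: parameters for vkImportSemaphoreZirconHandleFUCHSIA
  // (VK_FUCHSIA_external_semaphore), importing a Zircon handle as the semaphore payload.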
  struct ImportSemaphoreZirconHandleInfoFUCHSIA
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImportSemaphoreZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::Semaphore                           semaphore_ = {},
                                              VULKAN_HPP_NAMESPACE::SemaphoreImportFlags                flags_     = {},
                                              VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
                                                VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
                                              zx_handle_t zirconHandle_ = {} ) VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ )
      , flags( flags_ )
      , handleType( handleType_ )
      , zirconHandle( zirconHandle_ )
    {}

    VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportSemaphoreZirconHandleInfoFUCHSIA(
          *reinterpret_cast<ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA &
                            operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportSemaphoreZirconHandleInfoFUCHSIA &
      operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

    ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportSemaphoreZirconHandleInfoFUCHSIA &
      setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    ImportSemaphoreZirconHandleInfoFUCHSIA &
      setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ImportSemaphoreZirconHandleInfoFUCHSIA &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      zirconHandle = zirconHandle_;
      return *this;
    }

    operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportSemaphoreZirconHandleInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & ) const = default;
#  else
    bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
             ( flags == rhs.flags ) && ( handleType == rhs.handleType ) &&
             ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 );
    }

    bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType     = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA;
    const void *                               pNext     = {};
    VULKAN_HPP_NAMESPACE::Semaphore            semaphore = {};
    VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags     = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
    zx_handle_t zirconHandle = {};
  };
  static_assert( sizeof( ImportSemaphoreZirconHandleInfoFUCHSIA ) == sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA>
  {
    using Type = ImportSemaphoreZirconHandleInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/

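  // Parameters for vkInitializePerformanceApiINTEL (VK_INTEL_performance_query); pUserData carries an
  // optional application-provided pointer.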
  struct InitializePerformanceApiInfoINTEL
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eInitializePerformanceApiInfoINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
      : pUserData( pUserData_ )
    {}

    VULKAN_HPP_CONSTEXPR
      InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL &
                            operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
      return *this;
    }

    InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
    {
      pUserData = pUserData_;
      return *this;
    }

    operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( this );
    }

    operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default;
#else
    bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData );
    }

    bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eInitializePerformanceApiInfoINTEL;
    const void *                        pNext     = {};
    void *                              pUserData = {};
  };
  static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<InitializePerformanceApiInfoINTEL>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
  {
    using Type = InitializePerformanceApiInfoINTEL;
  };

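  // Parameters for vkRegisterDisplayEventEXT (VK_EXT_display_control): requests a fence that is
  // signaled when the specified display event (currently only eFirstPixelOut) occurs.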
  struct DisplayEventInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ =
                             VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
      : displayEvent( displayEvent_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT &
                            operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
      return *this;
    }

    DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
    {
      displayEvent = displayEvent_;
      return *this;
    }

    operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayEventInfoEXT *>( this );
    }

    operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayEventInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayEventInfoEXT const & ) const = default;
#else
    bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent );
    }

    bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType        = StructureType::eDisplayEventInfoEXT;
    const void *                              pNext        = {};
    VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
  };
  static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
  {
    using Type = DisplayEventInfoEXT;
  };

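  // Chromaticity coordinate pair (x, y) used by HdrMetadataEXT below.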
  struct XYColorEXT
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
    {}

    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
      return *this;
    }

    XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkXYColorEXT *>( this );
    }

    operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkXYColorEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( XYColorEXT const & ) const = default;
#else
    bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( x == rhs.x ) && ( y == rhs.y );
    }

    bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    float x = {};
    float y = {};
  };
  static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<XYColorEXT>::value, "struct wrapper is not a standard layout!" );

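  // Mastering-display and content-light-level metadata (SMPTE ST 2086 / CTA-861.3 values) consumed by
  // vkSetHdrMetadataEXT (VK_EXT_hdr_metadata); luminance values are in nits. Illustrative sketch only,
  // assuming the default `vk` namespace and valid `device` and `swapchain`:
  //   device.setHdrMetadataEXT( swapchain,
  //                             vk::HdrMetadataEXT{}.setMaxLuminance( 1000.0f ).setMinLuminance( 0.01f ) );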
  struct HdrMetadataEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_    = {},
                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_  = {},
                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_   = {},
                                         VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_           = {},
                                         float                            maxLuminance_         = {},
                                         float                            minLuminance_         = {},
                                         float                            maxContentLightLevel_ = {},
                                         float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT
      : displayPrimaryRed( displayPrimaryRed_ )
      , displayPrimaryGreen( displayPrimaryGreen_ )
      , displayPrimaryBlue( displayPrimaryBlue_ )
      , whitePoint( whitePoint_ )
      , maxLuminance( maxLuminance_ )
      , minLuminance( minLuminance_ )
      , maxContentLightLevel( maxContentLightLevel_ )
      , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
    {}

    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
      return *this;
    }

    HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    HdrMetadataEXT &
      setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
    {
      displayPrimaryRed = displayPrimaryRed_;
      return *this;
    }

    HdrMetadataEXT &
      setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
    {
      displayPrimaryGreen = displayPrimaryGreen_;
      return *this;
    }

    HdrMetadataEXT &
      setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
    {
      displayPrimaryBlue = displayPrimaryBlue_;
      return *this;
    }

    HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
    {
      whitePoint = whitePoint_;
      return *this;
    }

    HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
    {
      maxLuminance = maxLuminance_;
      return *this;
    }

    HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
    {
      minLuminance = minLuminance_;
      return *this;
    }

    HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      maxContentLightLevel = maxContentLightLevel_;
      return *this;
    }

    HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
      return *this;
    }

    operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkHdrMetadataEXT *>( this );
    }

    operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkHdrMetadataEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( HdrMetadataEXT const & ) const = default;
#else
    bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) &&
             ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) &&
             ( whitePoint == rhs.whitePoint ) && ( maxLuminance == rhs.maxLuminance ) &&
             ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) &&
             ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
    }

    bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                     = StructureType::eHdrMetadataEXT;
    const void *                        pNext                     = {};
    VULKAN_HPP_NAMESPACE::XYColorEXT    displayPrimaryRed         = {};
    VULKAN_HPP_NAMESPACE::XYColorEXT    displayPrimaryGreen       = {};
    VULKAN_HPP_NAMESPACE::XYColorEXT    displayPrimaryBlue        = {};
    VULKAN_HPP_NAMESPACE::XYColorEXT    whitePoint                = {};
    float                               maxLuminance              = {};
    float                               minLuminance              = {};
    float                               maxContentLightLevel      = {};
    float                               maxFrameAverageLightLevel = {};
  };
  static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eHdrMetadataEXT>
  {
    using Type = HdrMetadataEXT;
  };
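
  // Usage sketch (illustrative comment, not part of the generated API): HdrMetadataEXT is typically filled
  // through its chained setters and handed to Device::setHdrMetadataEXT from VK_EXT_hdr_metadata. The
  // primaries and luminance values below are hypothetical BT.2020 / SMPTE ST 2086 style numbers; 'device'
  // and 'swapchain' are assumed to be valid vk::Device / vk::SwapchainKHR handles created elsewhere.
  //
  //   vk::HdrMetadataEXT metadata = vk::HdrMetadataEXT()
  //                                   .setDisplayPrimaryRed( { 0.708f, 0.292f } )
  //                                   .setDisplayPrimaryGreen( { 0.170f, 0.797f } )
  //                                   .setDisplayPrimaryBlue( { 0.131f, 0.046f } )
  //                                   .setWhitePoint( { 0.3127f, 0.3290f } )
  //                                   .setMaxLuminance( 1000.0f )
  //                                   .setMinLuminance( 0.001f )
  //                                   .setMaxContentLightLevel( 1000.0f )
  //                                   .setMaxFrameAverageLightLevel( 400.0f );
  //   device.setHdrMetadataEXT( swapchain, metadata );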

  struct SemaphoreSignalInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
                                              uint64_t                        value_     = {} ) VULKAN_HPP_NOEXCEPT
      : semaphore( semaphore_ )
      , value( value_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo &
                            operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
      return *this;
    }

    SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphore = semaphore_;
      return *this;
    }

    SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
    {
      value = value_;
      return *this;
    }

    operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreSignalInfo *>( this );
    }

    operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreSignalInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreSignalInfo const & ) const = default;
#else
    bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) &&
             ( value == rhs.value );
    }

    bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eSemaphoreSignalInfo;
    const void *                        pNext     = {};
    VULKAN_HPP_NAMESPACE::Semaphore     semaphore = {};
    uint64_t                            value     = {};
  };
  static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
  {
    using Type = SemaphoreSignalInfo;
  };
  using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
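
  // Usage sketch (illustrative comment, not part of the generated API): SemaphoreSignalInfo drives
  // Device::signalSemaphore for timeline semaphores (core in Vulkan 1.2). 'device' and 'timelineSemaphore'
  // are assumed to be valid handles created elsewhere, and the signal value 42 is arbitrary.
  //
  //   vk::SemaphoreSignalInfo signalInfo( timelineSemaphore, 42 );
  //   device.signalSemaphore( signalInfo );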

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoSessionParametersUpdateInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoSessionParametersUpdateInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : updateSequenceCount( updateSequenceCount_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( VideoSessionParametersUpdateInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    VideoSessionParametersUpdateInfoKHR( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoSessionParametersUpdateInfoKHR( *reinterpret_cast<VideoSessionParametersUpdateInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR &
                            operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoSessionParametersUpdateInfoKHR &
      operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR const *>( &rhs );
      return *this;
    }

    VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoSessionParametersUpdateInfoKHR & setUpdateSequenceCount( uint32_t updateSequenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      updateSequenceCount = updateSequenceCount_;
      return *this;
    }

    operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( this );
    }

    operator VkVideoSessionParametersUpdateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoSessionParametersUpdateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default;
#  else
    bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount );
    }

    bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eVideoSessionParametersUpdateInfoKHR;
    const void *                        pNext               = {};
    uint32_t                            updateSequenceCount = {};
  };
  static_assert( sizeof( VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoSessionParametersUpdateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoSessionParametersUpdateInfoKHR>
  {
    using Type = VideoSessionParametersUpdateInfoKHR;
  };
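
  // Usage sketch (illustrative comment, beta API, assumptions only): assuming a
  // Device::updateVideoSessionParametersKHR call from VK_KHR_video_queue and a hypothetical
  // 'videoSessionParameters' handle, the update sequence count is expected to advance by one per update.
  //
  //   vk::VideoSessionParametersUpdateInfoKHR updateInfo( 1 );
  //   device.updateVideoSessionParametersKHR( videoSessionParameters, updateInfo );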
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

  struct SemaphoreWaitInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_          = {},
                                            uint32_t                                 semaphoreCount_ = {},
                                            const VULKAN_HPP_NAMESPACE::Semaphore *  pSemaphores_    = {},
                                            const uint64_t *                         pValues_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , semaphoreCount( semaphoreCount_ )
      , pSemaphores( pSemaphores_ )
      , pValues( pValues_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SemaphoreWaitInfo(
      VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags                                                     flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const &                        values_ = {} )
      : flags( flags_ )
      , semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) )
      , pSemaphores( semaphores_.data() )
      , pValues( values_.data() )
    {
#    ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
#    else
      if ( semaphores_.size() != values_.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                          "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
      }
#    endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo &
                            operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>( &rhs );
      return *this;
    }

    SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = semaphoreCount_;
      return *this;
    }

    SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT
    {
      pSemaphores = pSemaphores_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SemaphoreWaitInfo & setSemaphores(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ )
      VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
      pSemaphores    = semaphores_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pValues = pValues_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    SemaphoreWaitInfo &
      setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreCount = static_cast<uint32_t>( values_.size() );
      pValues        = values_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreWaitInfo *>( this );
    }

    operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreWaitInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreWaitInfo const & ) const = default;
#else
    bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( semaphoreCount == rhs.semaphoreCount ) && ( pSemaphores == rhs.pSemaphores ) &&
             ( pValues == rhs.pValues );
    }

    bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType          = StructureType::eSemaphoreWaitInfo;
    const void *                             pNext          = {};
    VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags          = {};
    uint32_t                                 semaphoreCount = {};
    const VULKAN_HPP_NAMESPACE::Semaphore *  pSemaphores    = {};
    const uint64_t *                         pValues        = {};
  };
  static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
  {
    using Type = SemaphoreWaitInfo;
  };
  using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
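
  // Usage sketch (illustrative comment, not part of the generated API): SemaphoreWaitInfo pairs each
  // timeline semaphore with the counter value to wait for and is consumed by Device::waitSemaphores.
  // 'semaphores' and 'values' are assumed to be equally sized, caller-owned std::vector objects; the
  // enhanced-mode constructor above enforces that the two sizes match.
  //
  //   vk::SemaphoreWaitInfo waitInfo( {}, semaphores, values );
  //   vk::Result result = device.waitSemaphores( waitInfo, UINT64_MAX );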

#ifndef VULKAN_HPP_NO_SMART_HANDLE
  class Device;
  template <typename Dispatch>
  class UniqueHandleTraits<AccelerationStructureKHR, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueAccelerationStructureKHR = UniqueHandle<AccelerationStructureKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<AccelerationStructureNV, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<Buffer, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueBuffer = UniqueHandle<Buffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<BufferView, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueBufferView = UniqueHandle<BufferView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<CommandBuffer, Dispatch>
  {
  public:
    using deleter = PoolFree<Device, CommandPool, Dispatch>;
  };
  using UniqueCommandBuffer = UniqueHandle<CommandBuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<CommandPool, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueCommandPool = UniqueHandle<CommandPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<CuFunctionNVX, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueCuFunctionNVX = UniqueHandle<CuFunctionNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<CuModuleNVX, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueCuModuleNVX = UniqueHandle<CuModuleNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<DeferredOperationKHR, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueDeferredOperationKHR = UniqueHandle<DeferredOperationKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<DescriptorPool, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueDescriptorPool = UniqueHandle<DescriptorPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<DescriptorSet, Dispatch>
  {
  public:
    using deleter = PoolFree<Device, DescriptorPool, Dispatch>;
  };
  using UniqueDescriptorSet = UniqueHandle<DescriptorSet, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<DescriptorSetLayout, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<DescriptorUpdateTemplate, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueDescriptorUpdateTemplate    = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  using UniqueDescriptorUpdateTemplateKHR = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<DeviceMemory, Dispatch>
  {
  public:
    using deleter = ObjectFree<Device, Dispatch>;
  };
  using UniqueDeviceMemory = UniqueHandle<DeviceMemory, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<Event, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueEvent = UniqueHandle<Event, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<Fence, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueFence = UniqueHandle<Fence, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<Framebuffer, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueFramebuffer = UniqueHandle<Framebuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<Image, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueImage = UniqueHandle<Image, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<ImageView, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueImageView = UniqueHandle<ImageView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<IndirectCommandsLayoutNV, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueIndirectCommandsLayoutNV = UniqueHandle<IndirectCommandsLayoutNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<Pipeline, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniquePipeline = UniqueHandle<Pipeline, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<PipelineCache, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniquePipelineCache = UniqueHandle<PipelineCache, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<PipelineLayout, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniquePipelineLayout = UniqueHandle<PipelineLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<PrivateDataSlotEXT, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniquePrivateDataSlotEXT = UniqueHandle<PrivateDataSlotEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<QueryPool, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueQueryPool = UniqueHandle<QueryPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<RenderPass, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueRenderPass = UniqueHandle<RenderPass, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<Sampler, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueSampler = UniqueHandle<Sampler, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<SamplerYcbcrConversion, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueSamplerYcbcrConversion    = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  using UniqueSamplerYcbcrConversionKHR = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<Semaphore, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueSemaphore = UniqueHandle<Semaphore, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<ShaderModule, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueShaderModule = UniqueHandle<ShaderModule, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<SwapchainKHR, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<ValidationCacheEXT, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
#  if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <typename Dispatch>
  class UniqueHandleTraits<VideoSessionKHR, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueVideoSessionKHR = UniqueHandle<VideoSessionKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/
#  if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <typename Dispatch>
  class UniqueHandleTraits<VideoSessionParametersKHR, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Device, Dispatch>;
  };
  using UniqueVideoSessionParametersKHR = UniqueHandle<VideoSessionParametersKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
#  endif /*VK_ENABLE_BETA_EXTENSIONS*/
#endif   /*VULKAN_HPP_NO_SMART_HANDLE*/
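
  // Usage sketch (illustrative comment, not part of the generated API): each UniqueHandleTraits
  // specialization above selects the deleter a UniqueHandle invokes on destruction, so the various
  // Device::create*Unique calls return RAII owners. 'device' is assumed to be a valid vk::Device.
  //
  //   vk::UniqueSemaphore semaphore = device.createSemaphoreUnique( vk::SemaphoreCreateInfo() );
  //   // the underlying VkSemaphore is destroyed automatically when 'semaphore' goes out of scope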

  class Device
  {
  public:
    using CType = VkDevice;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;

  public:
    VULKAN_HPP_CONSTEXPR         Device() = default;
    VULKAN_HPP_CONSTEXPR         Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Device & operator=( VkDevice device ) VULKAN_HPP_NOEXCEPT
    {
      m_device = device;
      return *this;
    }
#endif

    Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_device = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Device const & ) const = default;
#else
    bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_device == rhs.m_device;
    }

    bool operator!=( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_device != rhs.m_device;
    }

    bool operator<( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_device < rhs.m_device;
    }
#endif

    //=== VK_VERSION_1_0 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    PFN_vkVoidFunction
      getProcAddr( const char *     pName,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    PFN_vkVoidFunction
      getProcAddr( const std::string & name,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getQueue( uint32_t                      queueFamilyIndex,
                   uint32_t                      queueIndex,
                   VULKAN_HPP_NAMESPACE::Queue * pQueue,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
                         getQueue( uint32_t         queueFamilyIndex,
                                   uint32_t         queueIndex,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
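
    // Usage sketch (illustrative comment, not part of the generated API): the enhanced-mode getQueue
    // overload returns the Queue directly. 'graphicsQueueFamilyIndex' is assumed to come from
    // physical-device queue-family queries performed elsewhere.
    //
    //   vk::Queue graphicsQueue = device.getQueue( graphicsQueueFamilyIndex, 0 );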

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo *  pAllocateInfo,
                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                         VULKAN_HPP_NAMESPACE::DeviceMemory *              pMemory,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
      allocateMemory( const MemoryAllocateInfo &          allocateInfo,
                      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
      allocateMemoryUnique( const MemoryAllocateInfo &          allocateInfo,
                            Optional<const AllocationCallbacks> allocator
                                                                VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
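
    // Usage sketch (illustrative comment, not part of the generated API): in enhanced mode allocateMemory
    // returns the DeviceMemory directly (or throws on failure). 'allocationSize' and 'memoryTypeIndex' are
    // assumed to be derived from memory-requirement and memory-property queries made elsewhere.
    //
    //   vk::MemoryAllocateInfo allocateInfo( allocationSize, memoryTypeIndex );
    //   vk::DeviceMemory memory = device.allocateMemory( allocateInfo );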

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory                memory,
                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                     Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void free( VULKAN_HPP_NAMESPACE::DeviceMemory                memory,
               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void free( VULKAN_HPP_NAMESPACE::DeviceMemory  memory,
               Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory   memory,
                                    VULKAN_HPP_NAMESPACE::DeviceSize     offset,
                                    VULKAN_HPP_NAMESPACE::DeviceSize     size,
                                    VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
                                    void **                              ppData,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void *>::type
      mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory   memory,
                 VULKAN_HPP_NAMESPACE::DeviceSize     offset,
                 VULKAN_HPP_NAMESPACE::DeviceSize     size,
                 VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
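
    // Usage sketch (illustrative comment, not part of the generated API): mapMemory in enhanced mode
    // returns the mapped pointer directly. 'memory' is assumed to be a host-visible allocation and 'bytes'
    // a caller-owned std::vector<unsigned char> to upload.
    //
    //   void * data = device.mapMemory( memory, 0, bytes.size() );
    //   memcpy( data, bytes.data(), bytes.size() );
    //   device.unmapMemory( memory );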

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         flushMappedMemoryRanges( uint32_t                                        memoryRangeCount,
                                                  const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges(
      uint32_t                                        memoryRangeCount,
      const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
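
    // Usage sketch (illustrative comment, not part of the generated API): for non-coherent host-visible
    // memory, writes are flushed (and reads invalidated) via MappedMemoryRange; a single range converts
    // implicitly to the ArrayProxy parameter. 'memory' is assumed to be a currently mapped allocation.
    //
    //   device.flushMappedMemoryRanges( vk::MappedMemoryRange( memory, 0, VK_WHOLE_SIZE ) );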

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                              VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
                         getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer       buffer,
                                           VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                           VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer       buffer,
                        VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                        VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         bindImageMemory( VULKAN_HPP_NAMESPACE::Image        image,
                                          VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                          VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      bindImageMemory( VULKAN_HPP_NAMESPACE::Image        image,
                       VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                       VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
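
    // Usage sketch (illustrative comment, not part of the generated API): binding is a one-shot operation;
    // offset 0 assumes the allocation was made exclusively for 'buffer' with a size and alignment that
    // satisfy its memory requirements.
    //
    //   device.bindBufferMemory( buffer, memory, 0 );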

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer               buffer,
                                      VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements(
      VULKAN_HPP_NAMESPACE::Buffer buffer,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image                image,
                                     VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements(
      VULKAN_HPP_NAMESPACE::Image image,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getImageSparseMemoryRequirements(
      VULKAN_HPP_NAMESPACE::Image                           image,
      uint32_t *                                            pSparseMemoryRequirementCount,
      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>,
              typename Dispatch                               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
                         getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
                                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>,
              typename Dispatch                               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                                      = SparseImageMemoryRequirementsAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value,
                                      int>::type              = 0>
    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
                         getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image              image,
                                                           SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
                                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
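
    // Usage sketch (illustrative comment, not part of the generated API): the value-returning overloads
    // above turn the usual query-then-allocate pattern into a one-liner; 'buffer' is assumed to be a valid
    // vk::Buffer created elsewhere.
    //
    //   vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( buffer );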

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo *     pCreateInfo,
                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                      VULKAN_HPP_NAMESPACE::Fence *                     pFence,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
      createFence( const FenceCreateInfo &             createInfo,
                   Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
      createFenceUnique( const FenceCreateInfo &             createInfo,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyFence( VULKAN_HPP_NAMESPACE::Fence                       fence,
                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence   VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                       Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Fence                       fence,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Fence         fence,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         resetFences( uint32_t                            fenceCount,
                                      const VULKAN_HPP_NAMESPACE::Fence * pFences,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
      resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         waitForFences( uint32_t                            fenceCount,
                                        const VULKAN_HPP_NAMESPACE::Fence * pFences,
                                        VULKAN_HPP_NAMESPACE::Bool32        waitAll,
                                        uint64_t                            timeout,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
                                               VULKAN_HPP_NAMESPACE::Bool32                          waitAll,
                                               uint64_t                                              timeout,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
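
    // Usage sketch (illustrative comment, not part of the generated API): a minimal fence round trip using
    // the enhanced-mode overloads; single handles convert implicitly to the ArrayProxy parameters of
    // waitForFences / resetFences.
    //
    //   vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
    //   // ... submit work that signals 'fence' ...
    //   vk::Result result = device.waitForFences( fence, VK_TRUE, UINT64_MAX );
    //   device.resetFences( fence );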

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          VULKAN_HPP_NAMESPACE::Semaphore *                 pSemaphore,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
      createSemaphore( const SemaphoreCreateInfo &         createInfo,
                       Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
      createSemaphoreUnique( const SemaphoreCreateInfo &         createInfo,
                             Optional<const AllocationCallbacks> allocator
                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore                   semaphore,
                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Semaphore                   semaphore,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Semaphore     semaphore,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
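
    // Usage sketch (illustrative comment, not part of the generated API): a binary semaphore is created
    // from a default SemaphoreCreateInfo and destroyed explicitly here; createSemaphoreUnique is the RAII
    // alternative.
    //
    //   vk::Semaphore semaphore = device.createSemaphore( vk::SemaphoreCreateInfo() );
    //   device.destroySemaphore( semaphore );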

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo *     pCreateInfo,
                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                      VULKAN_HPP_NAMESPACE::Event *                     pEvent,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
      createEvent( const EventCreateInfo &             createInfo,
                   Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type
      createEventUnique( const EventCreateInfo &             createInfo,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyEvent( VULKAN_HPP_NAMESPACE::Event                       event,
                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event   VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                       Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Event                       event,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Event         event,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         setEvent( VULKAN_HPP_NAMESPACE::Event event,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         resetEvent( VULKAN_HPP_NAMESPACE::Event event,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
      resetEvent( VULKAN_HPP_NAMESPACE::Event event,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
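
    // Usage sketch (illustrative comment, not part of the generated API): host-side event signalling with
    // the enhanced-mode overloads; getEventStatus reports vk::Result::eEventSet or vk::Result::eEventReset.
    //
    //   vk::Event event = device.createEvent( vk::EventCreateInfo() );
    //   device.setEvent( event );
    //   bool signalled = ( device.getEventStatus( event ) == vk::Result::eEventSet );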

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          VULKAN_HPP_NAMESPACE::QueryPool *                 pQueryPool,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
      createQueryPool( const QueryPoolCreateInfo &         createInfo,
                       Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
      createQueryPoolUnique( const QueryPoolCreateInfo &         createInfo,
                             Optional<const AllocationCallbacks> allocator
                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
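
    // Usage sketch (illustrative only, not generated): with exceptions enabled the enhanced
    // overload returns the created handle directly and throws vk::SystemError on failure.
    // Assumes a valid vk::Device `device`:
    //   vk::QueryPoolCreateInfo qpci( {}, vk::QueryType::eTimestamp, 2 );
    //   vk::QueryPool queryPool = device.createQueryPool( qpci );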

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool                   queryPool,
                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::QueryPool                   queryPool,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::QueryPool     queryPool,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                              uint32_t                               firstQuery,
                                              uint32_t                               queryCount,
                                              size_t                                 dataSize,
                                              void *                                 pData,
                                              VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                              VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                              uint32_t                               firstQuery,
                                              uint32_t                               queryCount,
                                              ArrayProxy<T> const &                  data,
                                              VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                              VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T,
              typename Allocator = std::allocator<T>,
              typename Dispatch  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<T, Allocator>>
                         getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                              uint32_t                               firstQuery,
                                              uint32_t                               queryCount,
                                              size_t                                 dataSize,
                                              VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                              VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<T>
                         getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                             uint32_t                               firstQuery,
                                             uint32_t                               queryCount,
                                             VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                             VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
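
    // Usage sketch (illustrative only, not generated): the templated overload returns
    // ResultValue rather than throwing, since vk::Result::eNotReady is a valid outcome that
    // must be inspected. Assumes a `queryPool` holding two 64-bit timestamp queries:
    //   auto rv = device.getQueryPoolResults<uint64_t>(
    //     queryPool, 0, 2, 2 * sizeof( uint64_t ), sizeof( uint64_t ), vk::QueryResultFlagBits::e64 );
    //   if ( rv.result == vk::Result::eSuccess )
    //   {
    //     std::vector<uint64_t> const & timestamps = rv.value;
    //   }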

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo *    pCreateInfo,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                       VULKAN_HPP_NAMESPACE::Buffer *                    pBuffer,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
      createBuffer( const BufferCreateInfo &            createInfo,
                    Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type
      createBufferUnique( const BufferCreateInfo &            createInfo,
                          Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
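
    // Usage sketch (illustrative only, not generated): createBufferUnique wraps the handle in
    // a vk::UniqueBuffer that calls destroyBuffer automatically when it goes out of scope:
    //   vk::BufferCreateInfo bci( {}, 1024, vk::BufferUsageFlagBits::eTransferSrc );
    //   vk::UniqueBuffer stagingBuffer = device.createBufferUnique( bci );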

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                        Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Buffer        buffer,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *  pAllocator,
                                           VULKAN_HPP_NAMESPACE::BufferView *                 pView,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
      createBufferView( const BufferViewCreateInfo &        createInfo,
                        Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
      createBufferViewUnique( const BufferViewCreateInfo &        createInfo,
                              Optional<const AllocationCallbacks> allocator
                                                                  VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::BufferView    bufferView,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo *     pCreateInfo,
                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                      VULKAN_HPP_NAMESPACE::Image *                     pImage,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type
      createImage( const ImageCreateInfo &             createInfo,
                   Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type
      createImageUnique( const ImageCreateInfo &             createInfo,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyImage( VULKAN_HPP_NAMESPACE::Image                       image,
                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyImage( VULKAN_HPP_NAMESPACE::Image image   VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                       Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Image                       image,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Image         image,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image                    image,
                                    const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
                                    VULKAN_HPP_NAMESPACE::SubresourceLayout *      pLayout,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout(
      VULKAN_HPP_NAMESPACE::Image image,
      const ImageSubresource &    subresource,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          VULKAN_HPP_NAMESPACE::ImageView *                 pView,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
      createImageView( const ImageViewCreateInfo &         createInfo,
                       Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
      createImageViewUnique( const ImageViewCreateInfo &         createInfo,
                             Optional<const AllocationCallbacks> allocator
                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::ImageView     imageView,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                             VULKAN_HPP_NAMESPACE::ShaderModule *                 pShaderModule,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
      createShaderModule( const ShaderModuleCreateInfo &      createInfo,
                          Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
      createShaderModuleUnique( const ShaderModuleCreateInfo &      createInfo,
                                Optional<const AllocationCallbacks> allocator
                                                                    VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
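
    // Usage sketch (illustrative only, not generated); `loadSpirv` is a hypothetical helper
    // returning the SPIR-V words of a compiled shader, and codeSize is given in bytes:
    //   std::vector<uint32_t> code = loadSpirv( "shader.spv" );              // hypothetical helper
    //   vk::ShaderModuleCreateInfo smci( {}, code.size() * sizeof( uint32_t ), code.data() );
    //   vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique( smci );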

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule  shaderModule,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                              VULKAN_HPP_NAMESPACE::PipelineCache *                 pPipelineCache,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
      createPipelineCache( const PipelineCacheCreateInfo &     createInfo,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
      createPipelineCacheUnique( const PipelineCacheCreateInfo &     createInfo,
                                 Optional<const AllocationCallbacks> allocator
                                                                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache               pipelineCache,
                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                               Optional<const AllocationCallbacks>               allocator
                                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache               pipelineCache,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                               size_t *                            pDataSize,
                                               void *                              pData,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Uint8_tAllocator = std::allocator<uint8_t>,
              typename Dispatch         = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
      getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Uint8_tAllocator = std::allocator<uint8_t>,
              typename Dispatch         = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                = Uint8_tAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
      getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                            Uint8_tAllocator &                  uint8_tAllocator,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
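
    // Usage sketch (illustrative only, not generated): with exceptions enabled the enhanced
    // overload sizes the byte vector internally, so no separate size query is needed:
    //   std::vector<uint8_t> cacheBlob = device.getPipelineCacheData( pipelineCache );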

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache         dstCache,
                                              uint32_t                                    srcCacheCount,
                                              const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache                           dstCache,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                      pipelineCache,
                                                  uint32_t                                                 createInfoCount,
                                                  const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *        pAllocator,
                                                  VULKAN_HPP_NAMESPACE::Pipeline *                         pPipelines,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PipelineAllocator = std::allocator<Pipeline>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines(
      VULKAN_HPP_NAMESPACE::PipelineCache                                        pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PipelineAllocator = std::allocator<Pipeline>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                 = PipelineAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>>
                         createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                                        pipelineCache,
                                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                                  Optional<const AllocationCallbacks>                                        allocator,
                                                  PipelineAllocator & pipelineAllocator,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline(
      VULKAN_HPP_NAMESPACE::PipelineCache                      pipelineCache,
      const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                         createGraphicsPipelinesUnique(
                           VULKAN_HPP_NAMESPACE::PipelineCache                                        pipelineCache,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename PipelineAllocator         = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
              typename B                         = PipelineAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
                                      int>::type = 0>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                         createGraphicsPipelinesUnique(
                           VULKAN_HPP_NAMESPACE::PipelineCache                                        pipelineCache,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                           Optional<const AllocationCallbacks>                                        allocator,
                           PipelineAllocator &                                                        pipelineAllocator,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                      pipelineCache,
      const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
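
    // Usage sketch (illustrative only, not generated): pipeline creation returns ResultValue
    // rather than throwing on every non-success code, because
    // vk::Result::ePipelineCompileRequiredEXT is a valid outcome. Assumes a filled
    // vk::GraphicsPipelineCreateInfo `gpci`:
    //   auto created = device.createGraphicsPipelineUnique( pipelineCache, gpci );
    //   vk::UniquePipeline pipeline = std::move( created.value );  // inspect created.result if needed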

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache                     pipelineCache,
                                                 uint32_t                                                createInfoCount,
                                                 const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                                 VULKAN_HPP_NAMESPACE::Pipeline *                        pPipelines,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PipelineAllocator = std::allocator<Pipeline>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines(
      VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PipelineAllocator = std::allocator<Pipeline>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                 = PipelineAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>>
                         createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
                                                 ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                                 Optional<const AllocationCallbacks>                                       allocator,
                                                 PipelineAllocator & pipelineAllocator,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline(
      VULKAN_HPP_NAMESPACE::PipelineCache                     pipelineCache,
      const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                         createComputePipelinesUnique(
                           VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename PipelineAllocator         = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
              typename B                         = PipelineAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
                                      int>::type = 0>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                         createComputePipelinesUnique(
                           VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                           Optional<const AllocationCallbacks>                                       allocator,
                           PipelineAllocator &                                                       pipelineAllocator,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                     pipelineCache,
      const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline                    pipeline,
                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                          Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Pipeline                    pipeline,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Pipeline      pipeline,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                               VULKAN_HPP_NAMESPACE::PipelineLayout *                 pPipelineLayout,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
      createPipelineLayout( const PipelineLayoutCreateInfo &    createInfo,
                            Optional<const AllocationCallbacks> allocator
                                                                VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
      createPipelineLayoutUnique( const PipelineLayoutCreateInfo &    createInfo,
                                  Optional<const AllocationCallbacks> allocator
                                                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout              pipelineLayout,
                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyPipelineLayout(
      VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout              pipelineLayout,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo *   pCreateInfo,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        VULKAN_HPP_NAMESPACE::Sampler *                   pSampler,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type
      createSampler( const SamplerCreateInfo &           createInfo,
                     Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type
      createSamplerUnique( const SamplerCreateInfo &           createInfo,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler                     sampler,
                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Sampler                     sampler,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Sampler       sampler,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createDescriptorSetLayout(
      const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *           pAllocator,
      VULKAN_HPP_NAMESPACE::DescriptorSetLayout *                 pSetLayout,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
      createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo,
                                 Optional<const AllocationCallbacks>   allocator
                                                                       VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
      createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo,
                                       Optional<const AllocationCallbacks>   allocator
                                                                             VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout         descriptorSetLayout,
                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDescriptorSetLayout(
      VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout         descriptorSetLayout,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                               VULKAN_HPP_NAMESPACE::DescriptorPool *                 pDescriptorPool,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
      createDescriptorPool( const DescriptorPoolCreateInfo &    createInfo,
                            Optional<const AllocationCallbacks> allocator
                                                                VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
      createDescriptorPoolUnique( const DescriptorPoolCreateInfo &    createInfo,
                                  Optional<const AllocationCallbacks> allocator
                                                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool              descriptorPool,
                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDescriptorPool(
      VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool              descriptorPool,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool           descriptorPool,
                                VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
      resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool           descriptorPool,
                           VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
                                                 VULKAN_HPP_NAMESPACE::DescriptorSet *                   pDescriptorSets,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>,
              typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
      allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>,
              typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                      = DescriptorSetAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
      allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo,
                              DescriptorSetAllocator &          descriptorSetAllocator,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
      allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename DescriptorSetAllocator    = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>,
      typename B                         = DescriptorSetAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value,
                              int>::type = 0>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
      allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo,
                                    DescriptorSetAllocator &          descriptorSetAllocator,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
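
    // Usage sketch (illustrative only): allocating descriptor sets with the enhanced overloads above. Assumes the
    // default `vk` namespace, exceptions enabled, and hypothetical `pool` (vk::UniqueDescriptorPool) and `layouts`
    // (e.g. a std::vector<vk::DescriptorSetLayout>) created elsewhere.
    //   vk::DescriptorSetAllocateInfo  allocInfo( *pool, layouts );
    //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
    //   // or, with RAII handles that are freed automatically (requires a pool created with eFreeDescriptorSet):
    //   std::vector<vk::UniqueDescriptorSet> uniqueSets = device.allocateDescriptorSetsUnique( allocInfo );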

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool        descriptorPool,
                               uint32_t                                    descriptorSetCount,
                               const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
      freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool                          descriptorPool,
                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Result free( VULKAN_HPP_NAMESPACE::DescriptorPool        descriptorPool,
                 uint32_t                                    descriptorSetCount,
                 const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
      free( VULKAN_HPP_NAMESPACE::DescriptorPool                          descriptorPool,
            ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void updateDescriptorSets( uint32_t                                         descriptorWriteCount,
                               const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                               uint32_t                                         descriptorCopyCount,
                               const VULKAN_HPP_NAMESPACE::CopyDescriptorSet *  pDescriptorCopies,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
                               ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const &  descriptorCopies,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
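
    // Usage sketch (illustrative only): writing a uniform-buffer binding and freeing the sets again. Assumes the
    // default `vk` namespace, hypothetical `buffer`, `pool` and `sets` as in the examples above, and a pool created
    // with vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet (required for freeDescriptorSets).
    //   vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
    //   vk::WriteDescriptorSet   write( sets[0], 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
    //   device.updateDescriptorSets( write, nullptr );
    //   device.freeDescriptorSets( *pool, sets );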

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                            VULKAN_HPP_NAMESPACE::Framebuffer *                 pFramebuffer,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
      createFramebuffer( const FramebufferCreateInfo &       createInfo,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
      createFramebufferUnique( const FramebufferCreateInfo &       createInfo,
                               Optional<const AllocationCallbacks> allocator
                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
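
    // Usage sketch (illustrative only): creating a framebuffer with the unique-handle overload above. Assumes the
    // default `vk` namespace, exceptions enabled, and hypothetical `renderPass`, `attachments` (e.g. a
    // std::array<vk::ImageView, 2>), `width` and `height` defined elsewhere.
    //   vk::FramebufferCreateInfo fbInfo( {}, renderPass, attachments, width, height, 1 );
    //   vk::UniqueFramebuffer     framebuffer = device.createFramebufferUnique( fbInfo );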

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer                 framebuffer,
                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                          Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer                 framebuffer,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer   framebuffer,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *  pAllocator,
                                           VULKAN_HPP_NAMESPACE::RenderPass *                 pRenderPass,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
      createRenderPass( const RenderPassCreateInfo &        createInfo,
                        Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
      createRenderPassUnique( const RenderPassCreateInfo &        createInfo,
                              Optional<const AllocationCallbacks> allocator
                                                                  VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass                  renderPass,
                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::RenderPass                  renderPass,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::RenderPass    renderPass,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
                         getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
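
    // Usage sketch (illustrative only): creating a render pass and querying its render area granularity with the
    // enhanced overloads above. Assumes the default `vk` namespace, exceptions enabled, and a hypothetical
    // vk::RenderPassCreateInfo `renderPassInfo` filled in elsewhere.
    //   vk::UniqueRenderPass renderPass  = device.createRenderPassUnique( renderPassInfo );
    //   vk::Extent2D         granularity = device.getRenderAreaGranularity( *renderPass );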

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                            VULKAN_HPP_NAMESPACE::CommandPool *                 pCommandPool,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
      createCommandPool( const CommandPoolCreateInfo &       createInfo,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
      createCommandPoolUnique( const CommandPoolCreateInfo &       createInfo,
                               Optional<const AllocationCallbacks> allocator
                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool                 commandPool,
                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                          Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::CommandPool                 commandPool,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::CommandPool   commandPool,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                                           VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
      resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                        VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
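
    // Usage sketch (illustrative only): creating and resetting a command pool with the enhanced overloads above.
    // Assumes the default `vk` namespace, exceptions enabled, and a hypothetical `queueFamilyIndex`.
    //   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex );
    //   vk::UniqueCommandPool     commandPool = device.createCommandPoolUnique( poolInfo );
    //   device.resetCommandPool( *commandPool );   // flags default to {}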

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
                                                 VULKAN_HPP_NAMESPACE::CommandBuffer *                   pCommandBuffers,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename CommandBufferAllocator = std::allocator<CommandBuffer>,
              typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
      allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename CommandBufferAllocator = std::allocator<CommandBuffer>,
              typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                      = CommandBufferAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
      allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
                              CommandBufferAllocator &          commandBufferAllocator,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
      allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename CommandBufferAllocator    = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>,
      typename B                         = CommandBufferAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value,
                              int>::type = 0>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
      allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
                                    CommandBufferAllocator &          commandBufferAllocator,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
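
    // Usage sketch (illustrative only): allocating primary command buffers with the unique-handle overload above,
    // so they are freed back to the pool automatically. Assumes the default `vk` namespace, exceptions enabled,
    // and the hypothetical `commandPool` from the previous example.
    //   vk::CommandBufferAllocateInfo        allocInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 2 );
    //   std::vector<vk::UniqueCommandBuffer> commandBuffers = device.allocateCommandBuffersUnique( allocInfo );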

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                             uint32_t                                    commandBufferCount,
                             const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool                             commandPool,
                             ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void free( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
               uint32_t                                    commandBufferCount,
               const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void free( VULKAN_HPP_NAMESPACE::CommandPool                             commandPool,
               ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_VERSION_1_1 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         bindBufferMemory2( uint32_t                                           bindInfoCount,
                                            const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         bindImageMemory2( uint32_t                                          bindInfoCount,
                                           const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
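
    // Usage sketch (illustrative only): binding buffer memory through the ArrayProxy overload above, which accepts
    // a single info struct or a container of them. Assumes the default `vk` namespace and hypothetical `buffer`
    // and `memory` handles allocated elsewhere.
    //   vk::BindBufferMemoryInfo bindInfo( buffer, memory, 0 );
    //   device.bindBufferMemory2( bindInfo );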

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getGroupPeerMemoryFeatures( uint32_t                                       heapIndex,
                                     uint32_t                                       localDeviceIndex,
                                     uint32_t                                       remoteDeviceIndex,
                                     VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures(
      uint32_t         heapIndex,
      uint32_t         localDeviceIndex,
      uint32_t         remoteDeviceIndex,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                      VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                pMemoryRequirements,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2(
      const ImageMemoryRequirementsInfo2 & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2(
      const ImageMemoryRequirementsInfo2 & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
                                       VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                 pMemoryRequirements,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2(
      const BufferMemoryRequirementsInfo2 & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2(
      const BufferMemoryRequirementsInfo2 & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
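
    // Usage sketch (illustrative only): the StructureChain overload above returns MemoryRequirements2 together with
    // any chained extension structures in one call. Assumes the default `vk` namespace and a hypothetical `image`
    // handle created elsewhere.
    //   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
    //     vk::ImageMemoryRequirementsInfo2( image ) );
    //   vk::MemoryRequirements requirements = chain.get<vk::MemoryRequirements2>().memoryRequirements;
    //   bool preferDedicated = !!chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;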

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getImageSparseMemoryRequirements2(
      const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
      uint32_t *                                                       pSparseMemoryRequirementCount,
      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *           pSparseMemoryRequirements,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
              typename Dispatch                                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
                         getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info,
                                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
              typename Dispatch                                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                                       = SparseImageMemoryRequirements2Allocator,
              typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value,
                                      int>::type               = 0>
    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
                         getImageSparseMemoryRequirements2(
                           const ImageSparseMemoryRequirementsInfo2 & info,
                           SparseImageMemoryRequirements2Allocator &  sparseImageMemoryRequirements2Allocator,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
                          VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
                    VULKAN_HPP_NAMESPACE::Queue *                  pQueue,
                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
                         getQueue2( const DeviceQueueInfo2 & queueInfo,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
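
    // Usage sketch (illustrative only): retrieving a queue with the enhanced overload above. Assumes the default
    // `vk` namespace and a hypothetical `queueFamilyIndex` matching one of the device's queue create infos.
    //   vk::Queue queue = device.getQueue2( vk::DeviceQueueInfo2( {}, queueFamilyIndex, 0 ) );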

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion(
      const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
      VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *                 pYcbcrConversion,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
      createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo,
                                    Optional<const AllocationCallbacks>      allocator
                                                                             VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
      createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
                                          Optional<const AllocationCallbacks>      allocator
                                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySamplerYcbcrConversion(
      VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate(
      const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
      VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *                 pDescriptorUpdateTemplate,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
      createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                      Optional<const AllocationCallbacks>        allocator
                                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
      createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                            Optional<const AllocationCallbacks>        allocator
                                                                                       VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDescriptorUpdateTemplate(
      VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
                                          VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                          const void *                                   pData,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
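
    // Usage sketch (illustrative only): pushing a single uniform-buffer binding through a descriptor update
    // template. Assumes the default `vk` namespace, exceptions enabled, and hypothetical `setLayout`,
    // `descriptorSet` and `buffer` handles created elsewhere.
    //   vk::DescriptorUpdateTemplateEntry entry( 0, 0, 1, vk::DescriptorType::eUniformBuffer, 0, sizeof( vk::DescriptorBufferInfo ) );
    //   vk::DescriptorUpdateTemplateCreateInfo templateInfo(
    //     {}, entry, vk::DescriptorUpdateTemplateType::eDescriptorSet, setLayout );
    //   vk::UniqueDescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplateUnique( templateInfo );
    //   vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
    //   device.updateDescriptorSetWithTemplate( descriptorSet, *updateTemplate, &bufferInfo );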

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                        VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *          pSupport,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport(
      const DescriptorSetLayoutCreateInfo & createInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport(
      const DescriptorSetLayoutCreateInfo & createInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_VERSION_1_2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                            VULKAN_HPP_NAMESPACE::RenderPass *                  pRenderPass,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
      createRenderPass2( const RenderPassCreateInfo2 &       createInfo,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
      createRenderPass2Unique( const RenderPassCreateInfo2 &       createInfo,
                               Optional<const AllocationCallbacks> allocator
                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                         uint32_t                        firstQuery,
                         uint32_t                        queryCount,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                                   uint64_t *                      pValue,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
      getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
                                         uint64_t                                        timeout,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result waitSemaphores( const SemaphoreWaitInfo & waitInfo,
                                                uint64_t                  timeout,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      signalSemaphore( const SemaphoreSignalInfo & signalInfo,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
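
    // Usage sketch (illustrative only): timeline-semaphore host operations with the enhanced overloads above.
    // Assumes the default `vk` namespace, exceptions enabled, and a hypothetical `timelineSemaphore` created with a
    // vk::SemaphoreTypeCreateInfo( vk::SemaphoreType::eTimeline ) chained into its create info.
    //   uint64_t counter = device.getSemaphoreCounterValue( timelineSemaphore );
    //   device.signalSemaphore( vk::SemaphoreSignalInfo( timelineSemaphore, counter + 1 ) );
    //   uint64_t waitValue = counter + 1;
    //   vk::SemaphoreWaitInfo waitInfo( {}, timelineSemaphore, waitValue );
    //   vk::Result waitResult = device.waitSemaphores( waitInfo, UINT64_MAX );   // eSuccess, or eTimeout on a finite timeout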

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    DeviceAddress
      getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    DeviceAddress
      getBufferAddress( const BufferDeviceAddressInfo & info,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
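
    // Usage sketch (illustrative only): querying a buffer device address with the enhanced overload above. Assumes
    // the default `vk` namespace, a hypothetical `buffer` created with
    // vk::BufferUsageFlagBits::eShaderDeviceAddress, and the bufferDeviceAddress feature enabled.
    //   vk::DeviceAddress address = device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );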

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_swapchain ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                             VULKAN_HPP_NAMESPACE::SwapchainKHR *                 pSwapchain,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
      createSwapchainKHR( const SwapchainCreateInfoKHR &      createInfo,
                          Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
      createSwapchainKHRUnique( const SwapchainCreateInfoKHR &      createInfo,
                                Optional<const AllocationCallbacks> allocator
                                                                    VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR                swapchain,
                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR                swapchain,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR  swapchain,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                uint32_t *                         pSwapchainImageCount,
                                                VULKAN_HPP_NAMESPACE::Image *      pSwapchainImages,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename ImageAllocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type
      getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename ImageAllocator = std::allocator<Image>,
              typename Dispatch       = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B              = ImageAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type
      getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                             ImageAllocator &                   imageAllocator,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                              uint64_t                           timeout,
                                              VULKAN_HPP_NAMESPACE::Semaphore    semaphore,
                                              VULKAN_HPP_NAMESPACE::Fence        fence,
                                              uint32_t *                         pImageIndex,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<uint32_t>
                         acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                              uint64_t                           timeout,
                                              VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                                              VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
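
    // Usage sketch (illustrative only): the typical swapchain sequence using the enhanced overloads above. Assumes
    // the default `vk` namespace, exceptions enabled, and hypothetical `swapchainCreateInfo` and
    // `imageAvailableSemaphore` set up elsewhere.
    //   vk::UniqueSwapchainKHR swapchain = device.createSwapchainKHRUnique( swapchainCreateInfo );
    //   std::vector<vk::Image> images    = device.getSwapchainImagesKHR( *swapchain );
    //   vk::ResultValue<uint32_t> acquired =
    //     device.acquireNextImageKHR( *swapchain, UINT64_MAX, imageAvailableSemaphore );
    //   uint32_t imageIndex = acquired.value;   // acquired.result may be eSuboptimalKHR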

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR(
      VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
      getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR(
      VULKAN_HPP_NAMESPACE::SurfaceKHR                       surface,
      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
      getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
                                               uint32_t *                                            pImageIndex,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<uint32_t>
                         acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_display_swapchain ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR(
      uint32_t                                             swapchainCount,
      const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
      VULKAN_HPP_NAMESPACE::SwapchainKHR *                 pSwapchains,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>,
              typename Dispatch              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
      createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                 Optional<const AllocationCallbacks>                                    allocator
                                                                                                        VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>,
              typename Dispatch              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                     = SwapchainKHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
      createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                 Optional<const AllocationCallbacks>                                    allocator,
                                 SwapchainKHRAllocator & swapchainKHRAllocator,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR(
      const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
      createSharedSwapchainsKHRUnique(
        ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
        Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename SwapchainKHRAllocator     = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>,
              typename B                         = SwapchainKHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value,
                                      int>::type = 0>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
      createSharedSwapchainsKHRUnique(
        ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
        Optional<const AllocationCallbacks>                                    allocator,
        SwapchainKHRAllocator &                                                swapchainKHRAllocator,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type
      createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
                                      Optional<const AllocationCallbacks>                  allocator
                                                                                           VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
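
    // Usage sketch (illustrative comment, not generated code): the enhanced overload takes an
    // ArrayProxy of create infos and returns one vk::SwapchainKHR per entry; the *Unique variants
    // return UniqueHandles instead. The create infos are assumed to be filled in elsewhere.
    //   std::vector<vk::SwapchainCreateInfoKHR> createInfos = { createInfoDisplay0, createInfoDisplay1 };
    //   std::vector<vk::SwapchainKHR>           swapchains  = device.createSharedSwapchainsKHR( createInfos );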

    //=== VK_EXT_debug_marker ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT(
      const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT(
      const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
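
    // Usage sketch (illustrative comment, not generated code): attach a human-readable name to a
    // handle for tools such as RenderDoc; the buffer handle and the uint64_t cast are shown purely
    // for illustration.
    //   vk::DebugMarkerObjectNameInfoEXT nameInfo( vk::DebugReportObjectTypeEXT::eBuffer,
    //                                              uint64_t( static_cast<VkBuffer>( vertexBuffer ) ),
    //                                              "vertex buffer" );
    //   device.debugMarkerSetObjectNameEXT( nameInfo );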

#if defined( VK_ENABLE_BETA_EXTENSIONS )
    //=== VK_KHR_video_queue ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                                VULKAN_HPP_NAMESPACE::VideoSessionKHR *                 pVideoSession,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
      createVideoSessionKHR( const VideoSessionCreateInfoKHR &   createInfo,
                             Optional<const AllocationCallbacks> allocator
                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
      createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR &   createInfo,
                                   Optional<const AllocationCallbacks> allocator
                                                                       VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR             videoSession,
                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyVideoSessionKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR             videoSession,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionKHR               videoSession,
      uint32_t *                                          pVideoSessionMemoryRequirementsCount,
      VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename VideoGetMemoryPropertiesKHRAllocator = std::allocator<VideoGetMemoryPropertiesKHR>,
              typename Dispatch                             = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
      getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename VideoGetMemoryPropertiesKHRAllocator = std::allocator<VideoGetMemoryPropertiesKHR>,
      typename Dispatch                             = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                    = VideoGetMemoryPropertiesKHRAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
      getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR  videoSession,
                                            VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionKHR            videoSession,
      uint32_t                                         videoSessionBindMemoryCount,
      const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindVideoSessionMemoryKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionKHR                              videoSession,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR(
      const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                 pAllocator,
      VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR *                 pVideoSessionParameters,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
      createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR & createInfo,
                                       Optional<const AllocationCallbacks>         allocator
                                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
      createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo,
                                             Optional<const AllocationCallbacks>         allocator
                                                                                         VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR                   videoSessionParameters,
      const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                       const VideoSessionParametersUpdateInfoKHR &     updateInfo,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR   videoSessionParameters,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyVideoSessionParametersKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR   videoSessionParameters,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/

    //=== VK_NVX_binary_import ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                            VULKAN_HPP_NAMESPACE::CuModuleNVX *                 pModule,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
      createCuModuleNVX( const CuModuleCreateInfoNVX &       createInfo,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
      createCuModuleNVXUnique( const CuModuleCreateInfoNVX &       createInfo,
                               Optional<const AllocationCallbacks> allocator
                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                              VULKAN_HPP_NAMESPACE::CuFunctionNVX *                 pFunction,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
      createCuFunctionNVX( const CuFunctionCreateInfoNVX &     createInfo,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
      createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX &     createInfo,
                                 Optional<const AllocationCallbacks> allocator
                                                                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
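
    // Usage sketch (illustrative comment, not generated code): a CuFunctionNVX is created from a
    // previously created CuModuleNVX plus a kernel name; the cubin blob, its size and the kernel
    // name are assumptions for illustration.
    //   vk::CuModuleNVX   cuModule   = device.createCuModuleNVX( vk::CuModuleCreateInfoNVX( cubinSize, cubinData ) );
    //   vk::CuFunctionNVX cuFunction = device.createCuFunctionNVX( vk::CuFunctionCreateInfoNVX( cuModule, "myKernel" ) );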

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX                 module,
                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX   module,
                          Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX                 module,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX   module,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX               function,
                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                               Optional<const AllocationCallbacks> allocator
                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX               function,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_NVX_image_view_handle ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint32_t
      getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint32_t
      getImageViewHandleNVX( const ImageViewHandleInfoNVX & info,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView                       imageView,
                                                 VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
      getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
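
    // Usage sketch (illustrative comment, not generated code): the returned 32-bit handle identifies
    // the image view / sampler combination; the descriptor type and sampler used here are assumptions.
    //   vk::ImageViewHandleInfoNVX handleInfo( imageView, vk::DescriptorType::eCombinedImageSampler, sampler );
    //   uint32_t                   viewHandle = device.getImageViewHandleNVX( handleInfo );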

    //=== VK_AMD_shader_info ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
                                           VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                                           VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
                                           size_t *                                  pInfoSize,
                                           void *                                    pInfo,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Uint8_tAllocator = std::allocator<uint8_t>,
              typename Dispatch         = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
      getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
                        VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                        VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Uint8_tAllocator = std::allocator<uint8_t>,
              typename Dispatch         = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                = Uint8_tAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
      getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
                        VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                        VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
                        Uint8_tAllocator &                        uint8_tAllocator,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
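
    // Usage sketch (illustrative comment, not generated code): the enhanced overload sizes the
    // buffer internally and returns the raw info blob as std::vector<uint8_t>.
    //   std::vector<uint8_t> disassembly = device.getShaderInfoAMD( pipeline,
    //                                                               vk::ShaderStageFlagBits::eFragment,
    //                                                               vk::ShaderInfoTypeAMD::eDisassembly );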

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_NV_external_memory_win32 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory                    memory,
                                                 VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
                                                 HANDLE *                                              pHandle,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
      getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory                    memory,
                              VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_device_group ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getGroupPeerMemoryFeaturesKHR( uint32_t                                       heapIndex,
                                        uint32_t                                       localDeviceIndex,
                                        uint32_t                                       remoteDeviceIndex,
                                        VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR(
      uint32_t         heapIndex,
      uint32_t         localDeviceIndex,
      uint32_t         remoteDeviceIndex,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
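
    // Usage sketch (illustrative comment, not generated code): query which peer-memory features heap
    // `heapIndex` supports between device indices 0 and 1 of the logical device's device group.
    //   vk::PeerMemoryFeatureFlags features = device.getGroupPeerMemoryFeaturesKHR( heapIndex, 0, 1 );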

    //=== VK_KHR_maintenance1 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
                             VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_external_memory_win32 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                                  HANDLE *                                                  pHandle,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
      getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR(
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
      HANDLE                                                 handle,
      VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
      getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                         HANDLE                                                 handle,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_external_memory_fd ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
                                         int *                                            pFd,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
      getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo,
                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                                   int                                                    fd,
                                                   VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR *          pMemoryFdProperties,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
      getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                int                                                    fd,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
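
    // Usage sketch (illustrative comment, not generated code): export a device memory allocation as
    // a POSIX file descriptor; ownership of the fd transfers to the caller.
    //   vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
    //   int                    fd = device.getMemoryFdKHR( getFdInfo );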

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_external_semaphore_win32 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR(
      const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR(
      const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
      HANDLE *                                                     pHandle,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
      getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_external_semaphore_fd ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
                                            int *                                               pFd,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
      getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
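
    // Usage sketch (illustrative comment, not generated code): export a semaphore payload as a file
    // descriptor; the import direction goes through ImportSemaphoreFdInfoKHR / importSemaphoreFdKHR.
    //   vk::SemaphoreGetFdInfoKHR getFdInfo( semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
    //   int                       fd = device.getSemaphoreFdKHR( getFdInfo );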

    //=== VK_KHR_descriptor_update_template ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR(
      const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
      VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *                 pDescriptorUpdateTemplate,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
      createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                         Optional<const AllocationCallbacks>        allocator
                                                                                    VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
      createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                               Optional<const AllocationCallbacks>        allocator
                                                                                          VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDescriptorUpdateTemplateKHR(
      VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
                                             VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                             const void *                                   pData,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
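
    // Usage sketch (illustrative comment, not generated code): create a template once, then push raw
    // descriptor data through it; `templateCreateInfo`, `descriptorSet` and `descriptorData` are
    // assumed to be set up elsewhere.
    //   vk::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplateKHR( templateCreateInfo );
    //   device.updateDescriptorSetWithTemplateKHR( descriptorSet, updateTemplate, &descriptorData );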

    //=== VK_EXT_display_control ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR                  display,
                                                 const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
      displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                              const DisplayPowerInfoEXT &      displayPowerInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT *  pDeviceEventInfo,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                           VULKAN_HPP_NAMESPACE::Fence *                     pFence,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
      registerEventEXT( const DeviceEventInfoEXT &          deviceEventInfo,
                        Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
      registerEventEXTUnique( const DeviceEventInfoEXT &          deviceEventInfo,
                              Optional<const AllocationCallbacks> allocator
                                                                  VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR                  display,
                                                  const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                  VULKAN_HPP_NAMESPACE::Fence *                     pFence,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT(
      VULKAN_HPP_NAMESPACE::DisplayKHR    display,
      const DisplayEventInfoEXT &         displayEventInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
      registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                                     const DisplayEventInfoEXT &         displayEventInfo,
                                     Optional<const AllocationCallbacks> allocator
                                                                         VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR              swapchain,
                                                 VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
                                                 uint64_t *                                      pCounterValue,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
      getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR              swapchain,
                              VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
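
    // Usage sketch (illustrative comment, not generated code): register a fence that signals on the
    // display's first-pixel-out event and read the swapchain's vblank counter; the counter must have
    // been enabled through vk::SwapchainCounterCreateInfoEXT when the swapchain was created.
    //   vk::Fence firstPixelOutFence = device.registerDisplayEventEXT( display, vk::DisplayEventInfoEXT( vk::DisplayEventTypeEXT::eFirstPixelOut ) );
    //   uint64_t  vblankCount        = device.getSwapchainCounterEXT( swapchain, vk::SurfaceCounterFlagBitsEXT::eVblank );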

    //=== VK_GOOGLE_display_timing ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE(
      VULKAN_HPP_NAMESPACE::SwapchainKHR                 swapchain,
      VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
      getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE(
      VULKAN_HPP_NAMESPACE::SwapchainKHR                   swapchain,
      uint32_t *                                           pPresentationTimingCount,
      VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>,
              typename Dispatch                              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
      getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>,
      typename Dispatch                              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                     = PastPresentationTimingGOOGLEAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
      getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR      swapchain,
                                       PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
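
    // Usage sketch (illustrative comment, not generated code): query the display's refresh cycle and
    // the timing of previously presented images to drive frame pacing.
    //   vk::RefreshCycleDurationGOOGLE                refresh = device.getRefreshCycleDurationGOOGLE( swapchain );
    //   std::vector<vk::PastPresentationTimingGOOGLE> past    = device.getPastPresentationTimingGOOGLE( swapchain );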

    //=== VK_EXT_hdr_metadata ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setHdrMetadataEXT( uint32_t                                     swapchainCount,
                            const VULKAN_HPP_NAMESPACE::SwapchainKHR *   pSwapchains,
                            const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const &   swapchains,
                            ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
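
    // Usage sketch (illustrative comment, not generated code): the enhanced overload pairs swapchains
    // and metadata element-wise via ArrayProxy, so a single swapchain/metadata pair can be passed
    // directly; the luminance values below are placeholder assumptions.
    //   vk::HdrMetadataEXT metadata;
    //   metadata.setMaxLuminance( 1000.0f ).setMinLuminance( 0.01f );
    //   device.setHdrMetadataEXT( swapchain, metadata );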

    //=== VK_KHR_create_renderpass2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                               VULKAN_HPP_NAMESPACE::RenderPass *                  pRenderPass,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
      createRenderPass2KHR( const RenderPassCreateInfo2 &       createInfo,
                            Optional<const AllocationCallbacks> allocator
                                                                VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
      createRenderPass2KHRUnique( const RenderPassCreateInfo2 &       createInfo,
                                  Optional<const AllocationCallbacks> allocator
                                                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
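
    // Usage sketch (illustrative comment, not generated code): identical in spirit to createRenderPass,
    // but consuming RenderPassCreateInfo2 with the pNext-extensible *2 attachment/subpass structs;
    // `renderPassCreateInfo2` is assumed to be filled in elsewhere.
    //   vk::UniqueRenderPass renderPass = device.createRenderPass2KHRUnique( renderPassCreateInfo2 );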

    //=== VK_KHR_shared_presentable_image ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR(
      VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
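
    // Usage sketch (illustrative comment, not generated code): in enhanced mode the returned
    // vk::Result is a success code (eSuccess or eSuboptimalKHR); error results throw instead.
    //   vk::Result sharedImageStatus = device.getSwapchainStatusKHR( swapchain );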

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_external_fence_win32 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR(
      const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                                 HANDLE *                                                 pHandle,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type
      getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_external_fence_fd ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
                                        int *                                           pFd,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type
      getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo,
                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
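
    // VK_KHR_external_fence_fd usage sketch (illustrative; assumes 'device' is a vk::Device
    // and 'getFdInfo' a vk::FenceGetFdInfoKHR filled in per the Vulkan specification):
    //
    //   int fd = device.getFenceFdKHR( getFdInfo );  // the caller owns the returned file descriptor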

    //=== VK_KHR_performance_query ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
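
    // VK_KHR_performance_query usage sketch (illustrative; assumes 'device' is a vk::Device
    // and 'lockInfo' a vk::AcquireProfilingLockInfoKHR filled in per the Vulkan
    // specification):
    //
    //   device.acquireProfilingLockKHR( lockInfo );  // throws vk::SystemError on failure
    //   // ... record and submit command buffers that use performance queries ...
    //   device.releaseProfilingLockKHR();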

    //=== VK_EXT_debug_utils ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT(
      const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT(
      const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
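
    // VK_EXT_debug_utils usage sketch (illustrative; assumes 'device' is a vk::Device and
    // 'nameInfo' a vk::DebugUtilsObjectNameInfoEXT identifying an object and its debug name):
    //
    //   device.setDebugUtilsObjectNameEXT( nameInfo );  // throws vk::SystemError on failure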

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    //=== VK_ANDROID_external_memory_android_hardware_buffer ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID(
      const struct AHardwareBuffer *                                 buffer,
      VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
      getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
      getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID(
      const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
      struct AHardwareBuffer **                                               pBuffer,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<struct AHardwareBuffer *>::type
      getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_ANDROID_KHR*/
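
    // VK_ANDROID_external_memory_android_hardware_buffer usage sketch (illustrative, Android
    // only; assumes 'device' is a vk::Device and 'ahb' a reference to an AHardwareBuffer
    // obtained through the Android NDK):
    //
    //   vk::AndroidHardwareBufferPropertiesANDROID props =
    //     device.getAndroidHardwareBufferPropertiesANDROID( ahb );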

    //=== VK_KHR_get_memory_requirements2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                         VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                pMemoryRequirements,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR(
      const ImageMemoryRequirementsInfo2 & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR(
      const ImageMemoryRequirementsInfo2 & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
                                          VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR(
      const BufferMemoryRequirementsInfo2 & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR(
      const BufferMemoryRequirementsInfo2 & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getImageSparseMemoryRequirements2KHR(
      const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
      uint32_t *                                                       pSparseMemoryRequirementCount,
      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *           pSparseMemoryRequirements,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
              typename Dispatch                                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
                         getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info,
                                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>,
              typename Dispatch                                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                                       = SparseImageMemoryRequirements2Allocator,
              typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value,
                                      int>::type               = 0>
    VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
                         getImageSparseMemoryRequirements2KHR(
                           const ImageSparseMemoryRequirementsInfo2 & info,
                           SparseImageMemoryRequirements2Allocator &  sparseImageMemoryRequirements2Allocator,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
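
    // VK_KHR_get_memory_requirements2 usage sketch (illustrative; assumes 'device' is a
    // vk::Device and 'image' a previously created vk::Image; the struct member used below is
    // taken from the generated struct definitions, not from the declarations above):
    //
    //   vk::ImageMemoryRequirementsInfo2 info;
    //   info.image = image;
    //   vk::MemoryRequirements2 reqs = device.getImageMemoryRequirements2KHR( info );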

    //=== VK_KHR_acceleration_structure ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR(
      const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *                 pAccelerationStructure,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
      createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo,
                                      Optional<const AllocationCallbacks>        allocator
                                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
      createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo,
                                            Optional<const AllocationCallbacks>        allocator
                                                                                       VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR    accelerationStructure,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyAccelerationStructureKHR(
      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR    accelerationStructure,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                   deferredOperation,
      uint32_t                                                                     infoCount,
      const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *      pInfos,
      const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Result buildAccelerationStructuresKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                                     deferredOperation,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &      infos,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                     deferredOperation,
      const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                                       const CopyAccelerationStructureInfoKHR &   info,
                                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                             deferredOperation,
      const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR       deferredOperation,
                                                               const CopyAccelerationStructureToMemoryInfoKHR & info,
                                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                             deferredOperation,
      const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR       deferredOperation,
                                                               const CopyMemoryToAccelerationStructureInfoKHR & info,
                                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR(
      uint32_t                                               accelerationStructureCount,
      const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                        queryType,
      size_t                                                 dataSize,
      void *                                                 pData,
      size_t                                                 stride,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      writeAccelerationStructuresPropertiesKHR(
        ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
        VULKAN_HPP_NAMESPACE::QueryType                                          queryType,
        ArrayProxy<T> const &                                                    data,
        size_t                                                                   stride,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T,
              typename Allocator = std::allocator<T>,
              typename Dispatch  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
      writeAccelerationStructuresPropertiesKHR(
        ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
        VULKAN_HPP_NAMESPACE::QueryType                                          queryType,
        size_t                                                                   dataSize,
        size_t                                                                   stride,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type writeAccelerationStructuresPropertyKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                                          queryType,
      size_t                                                                   stride,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    DeviceAddress getAccelerationStructureAddressKHR(
      const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    DeviceAddress getAccelerationStructureAddressKHR(
      const AccelerationStructureDeviceAddressInfoKHR & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getAccelerationStructureCompatibilityKHR(
      const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
      VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR *     pCompatibility,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
                         getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo,
                                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getAccelerationStructureBuildSizesKHR(
      VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR                 buildType,
      const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
      const uint32_t *                                                        pMaxPrimitiveCounts,
      VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR *          pSizeInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
                         getAccelerationStructureBuildSizesKHR(
                           VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
                           const AccelerationStructureBuildGeometryInfoKHR &       buildInfo,
                           ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
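
    // VK_KHR_acceleration_structure usage sketch (illustrative; assumes 'device' is a
    // vk::Device, 'buildInfo' a vk::AccelerationStructureBuildGeometryInfoKHR,
    // 'primitiveCounts' a std::vector<uint32_t>, and 'createInfo' a
    // vk::AccelerationStructureCreateInfoKHR backed by a suitably sized buffer, all prepared
    // per the Vulkan specification):
    //
    //   vk::AccelerationStructureBuildSizesInfoKHR sizes = device.getAccelerationStructureBuildSizesKHR(
    //     vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, primitiveCounts );
    //   vk::AccelerationStructureKHR as = device.createAccelerationStructureKHR( createInfo );
    //   // ... build and use the acceleration structure ...
    //   device.destroyAccelerationStructureKHR( as );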

    //=== VK_KHR_sampler_ycbcr_conversion ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR(
      const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
      VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion *                 pYcbcrConversion,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
      createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo,
                                       Optional<const AllocationCallbacks>      allocator
                                                                                VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
      createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
                                             Optional<const AllocationCallbacks>      allocator
                                                                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySamplerYcbcrConversionKHR(
      VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
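
    // VK_KHR_sampler_ycbcr_conversion usage sketch (illustrative; assumes 'device' is a
    // vk::Device and 'createInfo' a vk::SamplerYcbcrConversionCreateInfo filled in per the
    // Vulkan specification):
    //
    //   auto conversion = device.createSamplerYcbcrConversionKHRUnique( createInfo );
    //   // conversion.get() yields the vk::SamplerYcbcrConversion handle; destruction is automatic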

    //=== VK_KHR_bind_memory2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         bindBufferMemory2KHR( uint32_t                                           bindInfoCount,
                                               const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         bindImageMemory2KHR( uint32_t                                          bindInfoCount,
                                              const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
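
    // VK_KHR_bind_memory2 usage sketch (illustrative; assumes 'device' is a vk::Device and
    // 'bindInfos' a std::vector<vk::BindBufferMemoryInfo> prepared per the Vulkan
    // specification; the vector converts implicitly to the ArrayProxy parameter):
    //
    //   device.bindBufferMemory2KHR( bindInfos );  // throws vk::SystemError on failure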

    //=== VK_EXT_image_drm_format_modifier ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT(
      VULKAN_HPP_NAMESPACE::Image                                 image,
      VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
      getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
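
    // VK_EXT_image_drm_format_modifier usage sketch (illustrative; assumes 'device' is a
    // vk::Device and 'image' an image created with DRM format modifier tiling):
    //
    //   vk::ImageDrmFormatModifierPropertiesEXT props =
    //     device.getImageDrmFormatModifierPropertiesEXT( image );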

    //=== VK_EXT_validation_cache ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                                   VULKAN_HPP_NAMESPACE::ValidationCacheEXT *                 pValidationCache,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT(
      const ValidationCacheCreateInfoEXT & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
      createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo,
                                      Optional<const AllocationCallbacks>  allocator
                                                                           VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT          validationCache,
                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyValidationCacheEXT(
      VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT          validationCache,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT         dstCache,
                                                   uint32_t                                         srcCacheCount,
                                                   const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT                           dstCache,
                                ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT(
      VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
      size_t *                                 pDataSize,
      void *                                   pData,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Uint8_tAllocator = std::allocator<uint8_t>,
              typename Dispatch         = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
      getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Uint8_tAllocator = std::allocator<uint8_t>,
              typename Dispatch         = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                = Uint8_tAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
      getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                 Uint8_tAllocator &                       uint8_tAllocator,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
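
    // VK_EXT_validation_cache usage sketch (illustrative; assumes 'device' is a vk::Device;
    // a default-constructed create info is used here purely for illustration):
    //
    //   vk::ValidationCacheEXT cache = device.createValidationCacheEXT( vk::ValidationCacheCreateInfoEXT() );
    //   std::vector<uint8_t> blob = device.getValidationCacheDataEXT( cache );
    //   device.destroyValidationCacheEXT( cache );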

    //=== VK_NV_ray_tracing ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createAccelerationStructureNV(
      const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *               pAllocator,
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV *                 pAccelerationStructure,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV(
      const AccelerationStructureCreateInfoNV & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
      createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo,
                                           Optional<const AllocationCallbacks>       allocator
                                                                                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV     accelerationStructure,
                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyAccelerationStructureNV(
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV     accelerationStructure,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getAccelerationStructureMemoryRequirementsNV(
      const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
      VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR *                              pMemoryRequirements,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(
      const AccelerationStructureMemoryRequirementsInfoNV & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV(
      const AccelerationStructureMemoryRequirementsInfoNV & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV(
      uint32_t                                                            bindInfoCount,
      const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryNV(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV(
      VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
      uint32_t                                                     createInfoCount,
      const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *            pAllocator,
      VULKAN_HPP_NAMESPACE::Pipeline *                             pPipelines,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PipelineAllocator = std::allocator<Pipeline>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(
      VULKAN_HPP_NAMESPACE::PipelineCache                                            pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PipelineAllocator = std::allocator<Pipeline>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                 = PipelineAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV(
      VULKAN_HPP_NAMESPACE::PipelineCache                                            pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks>                                            allocator,
      PipelineAllocator &                                                            pipelineAllocator,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV(
      VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
      const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                         createRayTracingPipelinesNVUnique(
                           VULKAN_HPP_NAMESPACE::PipelineCache                                            pipelineCache,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename PipelineAllocator         = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
              typename B                         = PipelineAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
                                      int>::type = 0>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                         createRayTracingPipelinesNVUnique(
                           VULKAN_HPP_NAMESPACE::PipelineCache                                            pipelineCache,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                           Optional<const AllocationCallbacks>                                            allocator,
                           PipelineAllocator &                                                            pipelineAllocator,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
      const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV(
      VULKAN_HPP_NAMESPACE::Pipeline pipeline,
      uint32_t                       firstGroup,
      uint32_t                       groupCount,
      size_t                         dataSize,
      void *                         pData,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                         uint32_t                       firstGroup,
                                         uint32_t                       groupCount,
                                         ArrayProxy<T> const &          data,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T,
              typename Allocator = std::allocator<T>,
              typename Dispatch  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
      getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                         uint32_t                       firstGroup,
                                         uint32_t                       groupCount,
                                         size_t                         dataSize,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
      getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                        uint32_t                       firstGroup,
                                        uint32_t                       groupCount,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV(
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
      size_t                                        dataSize,
      void *                                        pData,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                        ArrayProxy<T> const &                         data,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T,
              typename Allocator = std::allocator<T>,
              typename Dispatch  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
      getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                        size_t                                        dataSize,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
      getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                            uint32_t                       shader,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                         uint32_t                       shader,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
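
    // VK_NV_ray_tracing usage sketch (illustrative; assumes 'device' is a vk::Device,
    // 'pipeline' a ray tracing vk::Pipeline, 'groupCount' its number of shader groups, and
    // 'handleSize' the shader group handle size reported by the physical device properties):
    //
    //   std::vector<uint8_t> handles = device.getRayTracingShaderGroupHandlesNV<uint8_t>(
    //     pipeline, 0 /*firstGroup*/, groupCount, groupCount * handleSize );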

    //=== VK_KHR_maintenance3 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                           VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *          pSupport,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR(
      const DescriptorSetLayoutCreateInfo & createInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR(
      const DescriptorSetLayoutCreateInfo & createInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
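
    // VK_KHR_maintenance3 usage sketch (illustrative; assumes 'device' is a vk::Device and
    // 'layoutCreateInfo' a vk::DescriptorSetLayoutCreateInfo; the 'supported' member is taken
    // from the generated struct definition, not from the declarations above):
    //
    //   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupportKHR( layoutCreateInfo );
    //   // support.supported indicates whether the layout could be created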

    //=== VK_EXT_external_memory_host ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT(
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
      const void *                                           pHostPointer,
      VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
      getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                         const void *                                           pHostPointer,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
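
    // VK_EXT_external_memory_host usage sketch (illustrative; assumes 'device' is a
    // vk::Device and 'ptr' a suitably aligned host pointer):
    //
    //   vk::MemoryHostPointerPropertiesEXT props = device.getMemoryHostPointerPropertiesEXT(
    //     vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, ptr );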

    //=== VK_EXT_calibrated_timestamps ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT(
      uint32_t                                                 timestampCount,
      const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
      uint64_t *                                               pTimestamps,
      uint64_t *                                               pMaxDeviation,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
      ArrayProxy<uint64_t> const &                                               timestamps,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Uint64_tAllocator = std::allocator<uint64_t>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
      getCalibratedTimestampsEXT(
        ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Uint64_tAllocator = std::allocator<uint64_t>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                 = Uint64_tAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
      getCalibratedTimestampsEXT(
        ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
        Uint64_tAllocator &                                                        uint64_tAllocator,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
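
    // VK_EXT_calibrated_timestamps usage sketch (illustrative; assumes 'device' is a
    // vk::Device and 'infos' a std::vector<vk::CalibratedTimestampInfoEXT> listing the time
    // domains of interest):
    //
    //   auto result = device.getCalibratedTimestampsEXT( infos );
    //   // result.first  : std::vector<uint64_t>, one timestamp per requested time domain
    //   // result.second : the maximum deviation of the sampled clocks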

    //=== VK_KHR_timeline_semaphore ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR(
      VULKAN_HPP_NAMESPACE::Semaphore semaphore,
      uint64_t *                      pValue,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type
      getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
                                            uint64_t                                        timeout,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo,
                                                   uint64_t                  timeout,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
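
    // Usage sketch (illustrative comment only): host-side wait and signal on a timeline semaphore
    // through the KHR entry points above. `semaphore`, `waitValue` and `signalValue` are assumed to
    // be provided by the caller.
    //
    //   uint64_t counter = device.getSemaphoreCounterValueKHR( semaphore );
    //   vk::SemaphoreWaitInfo waitInfo;
    //   waitInfo.semaphoreCount = 1;
    //   waitInfo.pSemaphores    = &semaphore;
    //   waitInfo.pValues        = &waitValue;
    //   vk::Result waitResult = device.waitSemaphoresKHR( waitInfo, UINT64_MAX );  // eSuccess or eTimeout
    //   vk::SemaphoreSignalInfo signalInfo;
    //   signalInfo.semaphore = semaphore;
    //   signalInfo.value     = signalValue;
    //   device.signalSemaphoreKHR( signalInfo );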

    //=== VK_INTEL_performance_query ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL(
      const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL(
      const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
      VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL *                  pConfiguration,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
      acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
      acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL(
      VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releasePerformanceConfigurationINTEL(
      VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL(
      VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
      VULKAN_HPP_NAMESPACE::PerformanceValueINTEL *       pValue,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
      getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
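
    // Usage sketch (illustrative comment only): minimal VK_INTEL_performance_query bring-up with
    // the overloads above; error handling and performance configurations are omitted.
    //
    //   device.initializePerformanceApiINTEL( vk::InitializePerformanceApiInfoINTEL() );
    //   vk::PerformanceValueINTEL value =
    //     device.getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL::eHwCountersSupported );
    //   device.uninitializePerformanceApiINTEL();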

    //=== VK_AMD_display_native_hdr ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
                             VULKAN_HPP_NAMESPACE::Bool32       localDimmingEnable,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

    //=== VK_EXT_buffer_device_address ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    DeviceAddress
      getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    DeviceAddress
      getBufferAddressEXT( const BufferDeviceAddressInfo & info,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_EXT_full_screen_exclusive ===

#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT(
      VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT(
      VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
         releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR *      pModes,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
      getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_KHR_buffer_device_address ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    DeviceAddress
      getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    DeviceAddress
      getBufferAddressKHR( const BufferDeviceAddressInfo & info,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
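
    // Usage sketch (illustrative comment only): retrieving a buffer device address and its opaque
    // capture address with the KHR overloads above. `buffer` is assumed to be a vk::Buffer created
    // with vk::BufferUsageFlagBits::eShaderDeviceAddress.
    //
    //   vk::BufferDeviceAddressInfo addressInfo( buffer );
    //   vk::DeviceAddress address        = device.getBufferAddressKHR( addressInfo );
    //   uint64_t          captureAddress = device.getBufferOpaqueCaptureAddressKHR( addressInfo );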

    //=== VK_EXT_host_query_reset ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                            uint32_t                        firstQuery,
                            uint32_t                        queryCount,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
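
    // Usage sketch (illustrative comment only): resetting a range of queries from the host;
    // `queryPool` and `queryCount` are assumed to be provided by the caller.
    //
    //   device.resetQueryPoolEXT( queryPool, 0, queryCount );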

    //=== VK_KHR_deferred_host_operations ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createDeferredOperationKHR(
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR *      pDeferredOperation,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR(
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
      createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator
                                                                            VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR        operation,
                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDeferredOperationKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR        operation,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
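
    // Usage sketch (illustrative comment only): basic deferred-operation lifetime with the overloads
    // above. A real application hands `operation` to a deferrable command and then calls
    // deferredOperationJoinKHR from up to getDeferredOperationMaxConcurrencyKHR threads until it
    // reports eSuccess.
    //
    //   vk::DeferredOperationKHR operation  = device.createDeferredOperationKHR();
    //   uint32_t                 maxThreads = device.getDeferredOperationMaxConcurrencyKHR( operation );
    //   vk::Result joinResult      = device.deferredOperationJoinKHR( operation );
    //   vk::Result operationResult = device.getDeferredOperationResultKHR( operation );
    //   device.destroyDeferredOperationKHR( operation );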

    //=== VK_KHR_pipeline_executable_properties ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR(
      const VULKAN_HPP_NAMESPACE::PipelineInfoKHR *           pPipelineInfo,
      uint32_t *                                              pExecutableCount,
      VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>,
              typename Dispatch                                 = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
      getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>,
              typename Dispatch                                 = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                                        = PipelineExecutablePropertiesKHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value,
                                      int>::type                = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
      getPipelineExecutablePropertiesKHR(
        const PipelineInfoKHR &                    pipelineInfo,
        PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR(
      const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
      uint32_t *                                              pStatisticCount,
      VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR *  pStatistics,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>,
              typename Dispatch                                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
      getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>,
              typename Dispatch                                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                                       = PipelineExecutableStatisticKHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value,
                                      int>::type               = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
      getPipelineExecutableStatisticsKHR(
        const PipelineExecutableInfoKHR &         executableInfo,
        PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR(
      const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR *             pExecutableInfo,
      uint32_t *                                                          pInternalRepresentationCount,
      VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PipelineExecutableInternalRepresentationKHRAllocator =
                std::allocator<PipelineExecutableInternalRepresentationKHR>,
              typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
                                           PipelineExecutableInternalRepresentationKHRAllocator>>::type
      getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo,
                                                       Dispatch const &                  d
                                                                                         VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename PipelineExecutableInternalRepresentationKHRAllocator =
        std::allocator<PipelineExecutableInternalRepresentationKHR>,
      typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                         = PipelineExecutableInternalRepresentationKHRAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
                              int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
                                           PipelineExecutableInternalRepresentationKHRAllocator>>::type
      getPipelineExecutableInternalRepresentationsKHR(
        const PipelineExecutableInfoKHR &                      executableInfo,
        PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
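
    // Usage sketch (illustrative comment only): enumerating a pipeline's executables and their
    // statistics via the overloads above. Assumes `pipeline` was created on a device with the
    // pipelineExecutableInfo feature enabled.
    //
    //   auto executables = device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR( pipeline ) );
    //   for ( size_t i = 0; i < executables.size(); ++i )
    //   {
    //     auto statistics = device.getPipelineExecutableStatisticsKHR(
    //       vk::PipelineExecutableInfoKHR( pipeline, static_cast<uint32_t>( i ) ) );
    //   }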

    //=== VK_NV_device_generated_commands ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getGeneratedCommandsMemoryRequirementsNV(
      const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
      VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                             pMemoryRequirements,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV(
      const GeneratedCommandsMemoryRequirementsInfoNV & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV(
      const GeneratedCommandsMemoryRequirementsInfoNV & info,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
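
    // Usage sketch (illustrative comment only): querying generated-commands memory requirements,
    // including the StructureChain overload above; `info` is an assumed, already filled
    // vk::GeneratedCommandsMemoryRequirementsInfoNV.
    //
    //   auto chain = device.getGeneratedCommandsMemoryRequirementsNV<vk::MemoryRequirements2,
    //                                                                vk::MemoryDedicatedRequirements>( info );
    //   vk::MemoryRequirements2 const & requirements = chain.get<vk::MemoryRequirements2>();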

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV(
      const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV *                 pIndirectCommandsLayout,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
      createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo,
                                      Optional<const AllocationCallbacks>        allocator
                                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
      createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo,
                                            Optional<const AllocationCallbacks>        allocator
                                                                                       VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV    indirectCommandsLayout,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyIndirectCommandsLayoutNV(
      VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV    indirectCommandsLayout,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_EXT_private_data ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                                   VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT *                 pPrivateDataSlot,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type createPrivateDataSlotEXT(
      const PrivateDataSlotCreateInfoEXT & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type
      createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo,
                                      Optional<const AllocationCallbacks>  allocator
                                                                           VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT          privateDataSlot,
                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyPrivateDataSlotEXT(
      VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT          privateDataSlot,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType         objectType,
                                            uint64_t                                 objectHandle,
                                            VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                                            uint64_t                                 data,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
         setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType         objectType,
                            uint64_t                                 objectHandle,
                            VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                            uint64_t                                 data,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType         objectType,
                            uint64_t                                 objectHandle,
                            VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                            uint64_t *                               pData,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD uint64_t
                         getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType         objectType,
                                            uint64_t                                 objectHandle,
                                            VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
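
    // Usage sketch (illustrative comment only): attaching a 64-bit value to an object through a
    // private data slot with the EXT overloads above. `objectHandle` stands for the raw Vulkan
    // handle of the tracked object, reinterpreted as uint64_t by the caller.
    //
    //   vk::PrivateDataSlotEXT slot = device.createPrivateDataSlotEXT( vk::PrivateDataSlotCreateInfoEXT() );
    //   device.setPrivateDataEXT( vk::ObjectType::eImage, objectHandle, slot, 42 );
    //   uint64_t stored = device.getPrivateDataEXT( vk::ObjectType::eImage, objectHandle, slot );
    //   device.destroyPrivateDataSlotEXT( slot );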

    //=== VK_KHR_ray_tracing_pipeline ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                    deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                           pipelineCache,
      uint32_t                                                      createInfoCount,
      const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *             pAllocator,
      VULKAN_HPP_NAMESPACE::Pipeline *                              pPipelines,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PipelineAllocator = std::allocator<Pipeline>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                      deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                                             pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PipelineAllocator = std::allocator<Pipeline>,
              typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                 = PipelineAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                      deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                                             pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks>                                             allocator,
      PipelineAllocator &                                                             pipelineAllocator,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                    deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                           pipelineCache,
      const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                         createRayTracingPipelinesKHRUnique(
                           VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                      deferredOperation,
                           VULKAN_HPP_NAMESPACE::PipelineCache                                             pipelineCache,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename PipelineAllocator         = std::allocator<UniqueHandle<Pipeline, Dispatch>>,
              typename B                         = PipelineAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value,
                                      int>::type = 0>
    VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                         createRayTracingPipelinesKHRUnique(
                           VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                      deferredOperation,
                           VULKAN_HPP_NAMESPACE::PipelineCache                                             pipelineCache,
                           ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
                           Optional<const AllocationCallbacks>                                             allocator,
                           PipelineAllocator &                                                             pipelineAllocator,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                    deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                           pipelineCache,
      const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
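
    // Usage sketch (illustrative comment only, assumes enhanced mode and smart handles): creating a
    // single ray tracing pipeline without a deferred operation or pipeline cache; `createInfo` is an
    // assumed, already filled vk::RayTracingPipelineCreateInfoKHR.
    //
    //   auto result = device.createRayTracingPipelineKHRUnique( nullptr, nullptr, createInfo );
    //   // result.result is eSuccess or another allowed code such as ePipelineCompileRequiredEXT
    //   vk::UniquePipeline pipeline = std::move( result.value );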

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR(
      VULKAN_HPP_NAMESPACE::Pipeline pipeline,
      uint32_t                       firstGroup,
      uint32_t                       groupCount,
      size_t                         dataSize,
      void *                         pData,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                          uint32_t                       firstGroup,
                                          uint32_t                       groupCount,
                                          ArrayProxy<T> const &          data,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T,
              typename Allocator = std::allocator<T>,
              typename Dispatch  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
      getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                          uint32_t                       firstGroup,
                                          uint32_t                       groupCount,
                                          size_t                         dataSize,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
      getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                         uint32_t                       firstGroup,
                                         uint32_t                       groupCount,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
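
    // Usage sketch (illustrative comment only): fetching shader group handles for a shader binding
    // table. `handleSize` is assumed to come from
    // vk::PhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleSize and `groupCount` from
    // the pipeline's create info.
    //
    //   std::vector<uint8_t> handles = device.getRayTracingShaderGroupHandlesKHR<uint8_t>(
    //     pipeline, 0, groupCount, size_t( groupCount ) * handleSize );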

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR(
      VULKAN_HPP_NAMESPACE::Pipeline pipeline,
      uint32_t                       firstGroup,
      uint32_t                       groupCount,
      size_t                         dataSize,
      void *                         pData,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                       uint32_t                       firstGroup,
                                                       uint32_t                       groupCount,
                                                       ArrayProxy<T> const &          data,
                                                       Dispatch const &               d
                                                                                      VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T,
              typename Allocator = std::allocator<T>,
              typename Dispatch  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T, Allocator>>::type
      getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                       uint32_t                       firstGroup,
                                                       uint32_t                       groupCount,
                                                       size_t                         dataSize,
                                                       Dispatch const &               d
                                                                                      VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type
      getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                      uint32_t                       firstGroup,
                                                      uint32_t                       groupCount,
                                                      Dispatch const &               d
                                                                                     VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline             pipeline,
                                                     uint32_t                                   group,
                                                     VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
                                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;

#if defined( VK_USE_PLATFORM_FUCHSIA )
    //=== VK_FUCHSIA_external_memory ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA(
      const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
      zx_handle_t *                                                  pZirconHandle,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type
      getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getMemoryZirconHandlePropertiesFUCHSIA(
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits      handleType,
      zx_handle_t                                                 zirconHandle,
      VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
      getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                              zx_handle_t                                            zirconHandle,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_FUCHSIA )
    //=== VK_FUCHSIA_external_semaphore ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result importSemaphoreZirconHandleFUCHSIA(
      const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreZirconHandleFUCHSIA(
      const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA(
      const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
      zx_handle_t *                                                     pZirconHandle,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<zx_handle_t>::type
      getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_FUCHSIA*/

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
    {
      return m_device;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_device != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_device == VK_NULL_HANDLE;
    }

  private:
    VkDevice m_device = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDevice>
  {
    using type = VULKAN_HPP_NAMESPACE::Device;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDevice>
  {
    using Type = VULKAN_HPP_NAMESPACE::Device;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice>
  {
    using Type = VULKAN_HPP_NAMESPACE::Device;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Device>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct DisplayModeParametersKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {},
                                                   uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT
      : visibleRegion( visibleRegion_ )
      , refreshRate( refreshRate_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR &
                            operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs );
      return *this;
    }

    DisplayModeParametersKHR &
      setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
    {
      visibleRegion = visibleRegion_;
      return *this;
    }

    DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
    {
      refreshRate = refreshRate_;
      return *this;
    }

    operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModeParametersKHR *>( this );
    }

    operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModeParametersKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeParametersKHR const & ) const = default;
#else
    bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate );
    }

    bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
    uint32_t                       refreshRate   = {};
  };
  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );

  struct DisplayModeCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_      = {},
                                VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR  parameters_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , parameters( parameters_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModeCreateInfoKHR( *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR &
                            operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>( &rhs );
      return *this;
    }

    DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DisplayModeCreateInfoKHR &
      setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
    {
      parameters = parameters_;
      return *this;
    }

    operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( this );
    }

    operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModeCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default;
#else
    bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( parameters == rhs.parameters );
    }

    bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType             sType      = StructureType::eDisplayModeCreateInfoKHR;
    const void *                                    pNext      = {};
    VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags      = {};
    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR  parameters = {};
  };
  static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
  {
    using Type = DisplayModeCreateInfoKHR;
  };
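
  // Usage sketch (editorial illustration, not generated from the registry): filling in a
  // DisplayModeCreateInfoKHR either through its constructor or the chained setters above.
  // Assumes the default configuration (VULKAN_HPP_NAMESPACE == vk, enhanced mode, exceptions
  // enabled); the 1920x1080 / 60000 mHz values are arbitrary examples.
  //
  //   vk::DisplayModeCreateInfoKHR createInfo =
  //     vk::DisplayModeCreateInfoKHR{}.setParameters(
  //       vk::DisplayModeParametersKHR( vk::Extent2D( 1920, 1080 ), 60000 /* refresh rate is in millihertz */ ) );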

  class DisplayModeKHR
  {
  public:
    using CType = VkDisplayModeKHR;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;

  public:
    VULKAN_HPP_CONSTEXPR         DisplayModeKHR() = default;
    VULKAN_HPP_CONSTEXPR         DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
      : m_displayModeKHR( displayModeKHR )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DisplayModeKHR & operator=( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
    {
      m_displayModeKHR = displayModeKHR;
      return *this;
    }
#endif

    DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_displayModeKHR = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeKHR const & ) const = default;
#else
    bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_displayModeKHR == rhs.m_displayModeKHR;
    }

    bool operator!=( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_displayModeKHR != rhs.m_displayModeKHR;
    }

    bool operator<( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_displayModeKHR < rhs.m_displayModeKHR;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT
    {
      return m_displayModeKHR;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_displayModeKHR != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_displayModeKHR == VK_NULL_HANDLE;
    }

  private:
    VkDisplayModeKHR m_displayModeKHR = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDisplayModeKHR>
  {
    using type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR>
  {
    using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
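
  // Usage sketch (editorial illustration, not generated from the registry): creating a display
  // mode with the create-info shown earlier. Assumes the default configuration (vk namespace,
  // enhanced mode, exceptions enabled); `physicalDevice`, `display` and `createInfo` are
  // hypothetical objects set up elsewhere.
  //
  //   vk::DisplayModeKHR mode = physicalDevice.createDisplayModeKHR( display, createInfo );
  //   if ( mode )  // handles test false when they wrap VK_NULL_HANDLE
  //   {
  //     // use the mode, e.g. to query plane capabilities or create a display surface
  //   }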

  struct ExtensionProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      ExtensionProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & extensionName_ = {},
                           uint32_t                                             specVersion_ = {} ) VULKAN_HPP_NOEXCEPT
      : extensionName( extensionName_ )
      , specVersion( specVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExtensionProperties &
                            operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs );
      return *this;
    }

    operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExtensionProperties *>( this );
    }

    operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExtensionProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExtensionProperties const & ) const = default;
#else
    bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion );
    }

    bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
    uint32_t                                                               specVersion   = {};
  };
  static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
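
  // Usage sketch (editorial illustration, not generated from the registry): enumerating the
  // instance extensions and checking for VK_KHR_display. Assumes the default configuration
  // (vk namespace, enhanced mode, exceptions enabled); <algorithm> and <cstring> are already
  // included at the top of this header.
  //
  //   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();
  //   bool hasDisplay =
  //     std::any_of( extensions.begin(), extensions.end(), []( vk::ExtensionProperties const & e ) {
  //       return std::strcmp( e.extensionName.data(), VK_KHR_DISPLAY_EXTENSION_NAME ) == 0;
  //     } );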

  struct LayerProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layerName_             = {},
                       uint32_t                                             specVersion_           = {},
                       uint32_t                                             implementationVersion_ = {},
                       std::array<char, VK_MAX_DESCRIPTION_SIZE> const &    description_ = {} ) VULKAN_HPP_NOEXCEPT
      : layerName( layerName_ )
      , specVersion( specVersion_ )
      , implementationVersion( implementationVersion_ )
      , description( description_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
      return *this;
    }

    operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkLayerProperties *>( this );
    }

    operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkLayerProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( LayerProperties const & ) const = default;
#else
    bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) &&
             ( implementationVersion == rhs.implementationVersion ) && ( description == rhs.description );
    }

    bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName             = {};
    uint32_t                                                               specVersion           = {};
    uint32_t                                                               implementationVersion = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE>    description           = {};
  };
  static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<LayerProperties>::value, "struct wrapper is not a standard layout!" );
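
  // Usage sketch (editorial illustration, not generated from the registry): listing the instance
  // layers. Assumes the default configuration (vk namespace, enhanced mode, exceptions enabled).
  // layerName and description are fixed-size, null-terminated char arrays wrapped in ArrayWrapper1D.
  //
  //   std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
  //   for ( vk::LayerProperties const & layer : layers )
  //   {
  //     std::string name( layer.layerName.data() );
  //     uint32_t    implementationVersion = layer.implementationVersion;
  //   }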

  struct PerformanceCounterKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(
      VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric,
      VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ =
        VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer,
      VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ =
        VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32,
      std::array<uint8_t, VK_UUID_SIZE> const & uuid_ = {} ) VULKAN_HPP_NOEXCEPT
      : unit( unit_ )
      , scope( scope_ )
      , storage( storage_ )
      , uuid( uuid_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR &
                            operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>( &rhs );
      return *this;
    }

    operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceCounterKHR *>( this );
    }

    operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceCounterKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceCounterKHR const & ) const = default;
#else
    bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) &&
             ( storage == rhs.storage ) && ( uuid == rhs.uuid );
    }

    bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType = StructureType::ePerformanceCounterKHR;
    const void *                                     pNext = {};
    VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR  unit  = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
    VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope =
      VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
    VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage =
      VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
  };
  static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
  {
    using Type = PerformanceCounterKHR;
  };

  struct PerformanceCounterDescriptionKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(
      VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_       = {},
      std::array<char, VK_MAX_DESCRIPTION_SIZE> const &           name_        = {},
      std::array<char, VK_MAX_DESCRIPTION_SIZE> const &           category_    = {},
      std::array<char, VK_MAX_DESCRIPTION_SIZE> const &           description_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , name( name_ )
      , category( category_ )
      , description( description_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceCounterDescriptionKHR( *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR &
                            operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>( &rhs );
      return *this;
    }

    operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR *>( this );
    }

    operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default;
#else
    bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) &&
             ( category == rhs.category ) && ( description == rhs.description );
    }

    bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR         flags       = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name        = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category    = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  };
  static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PerformanceCounterDescriptionKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
  {
    using Type = PerformanceCounterDescriptionKHR;
  };
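
  // Usage sketch (editorial illustration, not generated from the registry): reading the fields of
  // the two performance-query structs above. The parallel vectors `counters` and `descriptions`
  // are hypothetical and assumed to have been filled by
  // vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR (or its vulkan.hpp wrapper).
  //
  //   for ( size_t i = 0; i < counters.size(); ++i )
  //   {
  //     vk::PerformanceCounterKHR const &            counter     = counters[i];
  //     vk::PerformanceCounterDescriptionKHR const & description = descriptions[i];
  //     // counter.unit / counter.scope / counter.storage describe how to interpret the result;
  //     // description.name, description.category and description.description are human-readable.
  //     std::string name( description.name.data() );
  //   }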

  struct DisplayModePropertiesKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR           displayMode_ = {},
                                VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_  = {} ) VULKAN_HPP_NOEXCEPT
      : displayMode( displayMode_ )
      , parameters( parameters_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModePropertiesKHR( *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayModePropertiesKHR &
                            operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModePropertiesKHR *>( this );
    }

    operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModePropertiesKHR const & ) const = default;
#else
    bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters );
    }

    bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DisplayModeKHR           displayMode = {};
    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters  = {};
  };
  static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );

  struct DisplayModeProperties2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(
      VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : displayModeProperties( displayModeProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayModeProperties2KHR &
                            operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayModeProperties2KHR *>( this );
    }

    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayModeProperties2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayModeProperties2KHR const & ) const = default;
#else
    bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties );
    }

    bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType                 = StructureType::eDisplayModeProperties2KHR;
    void *                                         pNext                 = {};
    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
  };
  static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
  {
    using Type = DisplayModeProperties2KHR;
  };
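
  // Usage sketch (editorial illustration, not generated from the registry): listing the modes of a
  // display. Assumes the default configuration (vk namespace, enhanced mode, exceptions enabled);
  // `physicalDevice` and `display` are hypothetical handles. getDisplayModeProperties2KHR returns
  // the extensible DisplayModeProperties2KHR wrappers instead.
  //
  //   std::vector<vk::DisplayModePropertiesKHR> modes = physicalDevice.getDisplayModePropertiesKHR( display );
  //   for ( vk::DisplayModePropertiesKHR const & m : modes )
  //   {
  //     vk::Extent2D visibleRegion = m.parameters.visibleRegion;
  //     uint32_t     refreshRate   = m.parameters.refreshRate;  // millihertz
  //   }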

  struct DisplayPlaneInfo2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {},
                                               uint32_t planeIndex_                       = {} ) VULKAN_HPP_NOEXCEPT
      : mode( mode_ )
      , planeIndex( planeIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneInfo2KHR( *reinterpret_cast<DisplayPlaneInfo2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR &
                            operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>( &rhs );
      return *this;
    }

    DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      planeIndex = planeIndex_;
      return *this;
    }

    operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( this );
    }

    operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneInfo2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default;
#else
    bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) &&
             ( planeIndex == rhs.planeIndex );
    }

    bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType  sType      = StructureType::eDisplayPlaneInfo2KHR;
    const void *                         pNext      = {};
    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode       = {};
    uint32_t                             planeIndex = {};
  };
  static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
  {
    using Type = DisplayPlaneInfo2KHR;
  };

  struct DisplayPlaneCapabilitiesKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {},
                                   VULKAN_HPP_NAMESPACE::Offset2D                  minSrcPosition_ = {},
                                   VULKAN_HPP_NAMESPACE::Offset2D                  maxSrcPosition_ = {},
                                   VULKAN_HPP_NAMESPACE::Extent2D                  minSrcExtent_   = {},
                                   VULKAN_HPP_NAMESPACE::Extent2D                  maxSrcExtent_   = {},
                                   VULKAN_HPP_NAMESPACE::Offset2D                  minDstPosition_ = {},
                                   VULKAN_HPP_NAMESPACE::Offset2D                  maxDstPosition_ = {},
                                   VULKAN_HPP_NAMESPACE::Extent2D                  minDstExtent_   = {},
                                   VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT
      : supportedAlpha( supportedAlpha_ )
      , minSrcPosition( minSrcPosition_ )
      , maxSrcPosition( maxSrcPosition_ )
      , minSrcExtent( minSrcExtent_ )
      , maxSrcExtent( maxSrcExtent_ )
      , minDstPosition( minDstPosition_ )
      , maxDstPosition( maxDstPosition_ )
      , minDstExtent( minDstExtent_ )
      , maxDstExtent( maxDstExtent_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneCapabilitiesKHR( *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneCapabilitiesKHR &
                            operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR *>( this );
    }

    operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default;
#else
    bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) &&
             ( maxSrcPosition == rhs.maxSrcPosition ) && ( minSrcExtent == rhs.minSrcExtent ) &&
             ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) &&
             ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) &&
             ( maxDstExtent == rhs.maxDstExtent );
    }

    bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
    VULKAN_HPP_NAMESPACE::Offset2D                  minSrcPosition = {};
    VULKAN_HPP_NAMESPACE::Offset2D                  maxSrcPosition = {};
    VULKAN_HPP_NAMESPACE::Extent2D                  minSrcExtent   = {};
    VULKAN_HPP_NAMESPACE::Extent2D                  maxSrcExtent   = {};
    VULKAN_HPP_NAMESPACE::Offset2D                  minDstPosition = {};
    VULKAN_HPP_NAMESPACE::Offset2D                  maxDstPosition = {};
    VULKAN_HPP_NAMESPACE::Extent2D                  minDstExtent   = {};
    VULKAN_HPP_NAMESPACE::Extent2D                  maxDstExtent   = {};
  };
  static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayPlaneCapabilitiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  struct DisplayPlaneCapabilities2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(
      VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT
      : capabilities( capabilities_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneCapabilities2KHR( *reinterpret_cast<DisplayPlaneCapabilities2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneCapabilities2KHR &
                            operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR *>( this );
    }

    operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default;
#else
    bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities );
    }

    bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType        = StructureType::eDisplayPlaneCapabilities2KHR;
    void *                                            pNext        = {};
    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
  };
  static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayPlaneCapabilities2KHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
  {
    using Type = DisplayPlaneCapabilities2KHR;
  };
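
  // Usage sketch (editorial illustration, not generated from the registry): querying what a display
  // plane can do with a given mode. Assumes the default configuration (vk namespace, enhanced mode,
  // exceptions enabled); `physicalDevice`, `mode` and `planeIndex` are hypothetical.
  //
  //   vk::DisplayPlaneCapabilities2KHR caps =
  //     physicalDevice.getDisplayPlaneCapabilities2KHR( vk::DisplayPlaneInfo2KHR( mode, planeIndex ) );
  //   if ( caps.capabilities.supportedAlpha & vk::DisplayPlaneAlphaFlagBitsKHR::ePerPixel )
  //   {
  //     // per-pixel alpha blending is available on this plane
  //   }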

  struct DisplayPlanePropertiesKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {},
                                                    uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : currentDisplay( currentDisplay_ )
      , currentStackIndex( currentStackIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlanePropertiesKHR( *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayPlanePropertiesKHR &
                            operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR *>( this );
    }

    operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlanePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default;
#else
    bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex );
    }

    bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay    = {};
    uint32_t                         currentStackIndex = {};
  };
  static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayPlanePropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  struct DisplayPlaneProperties2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(
      VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : displayPlaneProperties( displayPlaneProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPlaneProperties2KHR( *reinterpret_cast<DisplayPlaneProperties2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayPlaneProperties2KHR &
                            operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPlaneProperties2KHR *>( this );
    }

    operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPlaneProperties2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default;
#else
    bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( displayPlaneProperties == rhs.displayPlaneProperties );
    }

    bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType             sType                  = StructureType::eDisplayPlaneProperties2KHR;
    void *                                          pNext                  = {};
    VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
  };
  static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayPlaneProperties2KHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
  {
    using Type = DisplayPlaneProperties2KHR;
  };
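
  // Usage sketch (editorial illustration, not generated from the registry): walking the display
  // planes of a physical device. Assumes the default configuration (vk namespace, enhanced mode,
  // exceptions enabled); `physicalDevice` is a hypothetical handle.
  //
  //   std::vector<vk::DisplayPlanePropertiesKHR> planes = physicalDevice.getDisplayPlanePropertiesKHR();
  //   for ( uint32_t planeIndex = 0; planeIndex < planes.size(); ++planeIndex )
  //   {
  //     vk::DisplayKHR boundDisplay = planes[planeIndex].currentDisplay;  // may be a null handle
  //     uint32_t       stackIndex   = planes[planeIndex].currentStackIndex;
  //   }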

  struct DisplayPropertiesKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR               display_              = {},
                            const char *                                   displayName_          = {},
                            VULKAN_HPP_NAMESPACE::Extent2D                 physicalDimensions_   = {},
                            VULKAN_HPP_NAMESPACE::Extent2D                 physicalResolution_   = {},
                            VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_  = {},
                            VULKAN_HPP_NAMESPACE::Bool32                   planeReorderPossible_ = {},
                            VULKAN_HPP_NAMESPACE::Bool32                   persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT
      : display( display_ )
      , displayName( displayName_ )
      , physicalDimensions( physicalDimensions_ )
      , physicalResolution( physicalResolution_ )
      , supportedTransforms( supportedTransforms_ )
      , planeReorderPossible( planeReorderPossible_ )
      , persistentContent( persistentContent_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPropertiesKHR( *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayPropertiesKHR &
                            operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPropertiesKHR *>( this );
    }

    operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPropertiesKHR const & ) const = default;
#else
    bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( display == rhs.display ) && ( displayName == rhs.displayName ) &&
             ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) &&
             ( supportedTransforms == rhs.supportedTransforms ) &&
             ( planeReorderPossible == rhs.planeReorderPossible ) && ( persistentContent == rhs.persistentContent );
    }

    bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DisplayKHR               display              = {};
    const char *                                   displayName          = {};
    VULKAN_HPP_NAMESPACE::Extent2D                 physicalDimensions   = {};
    VULKAN_HPP_NAMESPACE::Extent2D                 physicalResolution   = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms  = {};
    VULKAN_HPP_NAMESPACE::Bool32                   planeReorderPossible = {};
    VULKAN_HPP_NAMESPACE::Bool32                   persistentContent    = {};
  };
  static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );

  struct DisplayProperties2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : displayProperties( displayProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayProperties2KHR( *reinterpret_cast<DisplayProperties2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayProperties2KHR &
                            operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>( &rhs );
      return *this;
    }

    operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayProperties2KHR *>( this );
    }

    operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayProperties2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayProperties2KHR const & ) const = default;
#else
    bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties );
    }

    bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType             = StructureType::eDisplayProperties2KHR;
    void *                                     pNext             = {};
    VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
  };
  static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
  {
    using Type = DisplayProperties2KHR;
  };
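
  // Usage sketch (editorial illustration, not generated from the registry): enumerating the displays
  // attached to a physical device. Assumes the default configuration (vk namespace, enhanced mode,
  // exceptions enabled); `physicalDevice` is a hypothetical handle. getDisplayProperties2KHR returns
  // the extensible DisplayProperties2KHR wrappers instead.
  //
  //   std::vector<vk::DisplayPropertiesKHR> displays = physicalDevice.getDisplayPropertiesKHR();
  //   for ( vk::DisplayPropertiesKHR const & d : displays )
  //   {
  //     const char * name       = d.displayName;  // may be nullptr
  //     vk::Extent2D resolution = d.physicalResolution;
  //   }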

  struct PhysicalDeviceExternalBufferInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(
      VULKAN_HPP_NAMESPACE::BufferCreateFlags                flags_ = {},
      VULKAN_HPP_NAMESPACE::BufferUsageFlags                 usage_ = {},
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , usage( usage_ )
      , handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalBufferInfo( *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo &
                            operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>( &rhs );
      return *this;
    }

    PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    PhysicalDeviceExternalBufferInfo &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( this );
    }

    operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) &&
             ( handleType == rhs.handleType );
    }

    bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::ePhysicalDeviceExternalBufferInfo;
    const void *                                           pNext = {};
    VULKAN_HPP_NAMESPACE::BufferCreateFlags                flags = {};
    VULKAN_HPP_NAMESPACE::BufferUsageFlags                 usage = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalBufferInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
  {
    using Type = PhysicalDeviceExternalBufferInfo;
  };
  using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;

  struct ExternalMemoryProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(
      VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags    externalMemoryFeatures_        = {},
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {},
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_         = {} ) VULKAN_HPP_NOEXCEPT
      : externalMemoryFeatures( externalMemoryFeatures_ )
      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
      , compatibleHandleTypes( compatibleHandleTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryProperties( *reinterpret_cast<ExternalMemoryProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryProperties &
                            operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>( &rhs );
      return *this;
    }

    operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryProperties *>( this );
    }

    operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryProperties const & ) const = default;
#else
    bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) &&
             ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
             ( compatibleHandleTypes == rhs.compatibleHandleTypes );
    }

    bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags    externalMemoryFeatures        = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes         = {};
  };
  static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
  using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;

  struct ExternalBufferProperties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalBufferProperties(
      VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : externalMemoryProperties( externalMemoryProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalBufferProperties( *reinterpret_cast<ExternalBufferProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalBufferProperties &
                            operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>( &rhs );
      return *this;
    }

    operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalBufferProperties *>( this );
    }

    operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalBufferProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalBufferProperties const & ) const = default;
#else
    bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( externalMemoryProperties == rhs.externalMemoryProperties );
    }

    bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType                    = StructureType::eExternalBufferProperties;
    void *                                         pNext                    = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
  };
  static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExternalBufferProperties>
  {
    using Type = ExternalBufferProperties;
  };
  using ExternalBufferPropertiesKHR = ExternalBufferProperties;
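
  // Usage sketch (editorial illustration, not generated from the registry): asking whether a buffer
  // with the given usage can export opaque-fd external memory. Assumes the default configuration
  // (vk namespace, enhanced mode, exceptions enabled); `physicalDevice` is a hypothetical handle and
  // eTransferSrc is an arbitrary example usage.
  //
  //   vk::PhysicalDeviceExternalBufferInfo externalBufferInfo(
  //     {}, vk::BufferUsageFlagBits::eTransferSrc, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::ExternalBufferProperties externalBufferProperties =
  //     physicalDevice.getExternalBufferProperties( externalBufferInfo );
  //   bool exportable = static_cast<bool>( externalBufferProperties.externalMemoryProperties.externalMemoryFeatures &
  //                                        vk::ExternalMemoryFeatureFlagBits::eExportable );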

  struct PhysicalDeviceExternalFenceInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(
      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ =
        VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalFenceInfo( *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo &
                            operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>( &rhs );
      return *this;
    }

    PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExternalFenceInfo &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( this );
    }

    operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
    }

    bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType = StructureType::ePhysicalDeviceExternalFenceInfo;
    const void *                                          pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalFenceInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
  {
    using Type = PhysicalDeviceExternalFenceInfo;
  };
  using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;

  struct ExternalFenceProperties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalFenceProperties(
      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {},
      VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_         = {},
      VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags    externalFenceFeatures_         = {} ) VULKAN_HPP_NOEXCEPT
      : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
      , compatibleHandleTypes( compatibleHandleTypes_ )
      , externalFenceFeatures( externalFenceFeatures_ )
    {}

    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalFenceProperties( *reinterpret_cast<ExternalFenceProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalFenceProperties &
                            operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>( &rhs );
      return *this;
    }

    operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalFenceProperties *>( this );
    }

    operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalFenceProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalFenceProperties const & ) const = default;
#else
    bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
             ( compatibleHandleTypes == rhs.compatibleHandleTypes ) &&
             ( externalFenceFeatures == rhs.externalFenceFeatures );
    }

    bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType = StructureType::eExternalFenceProperties;
    void *                                             pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes         = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags    externalFenceFeatures         = {};
  };
  static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExternalFenceProperties>
  {
    using Type = ExternalFenceProperties;
  };
  using ExternalFencePropertiesKHR = ExternalFenceProperties;
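
  // Illustrative sketch, not part of the generated API: assuming the default vk namespace,
  // enhanced mode, and a previously acquired vk::PhysicalDevice named physicalDevice, the two
  // structs above could be used together to query external fence capabilities roughly like this:
  //
  //   vk::PhysicalDeviceExternalFenceInfo externalFenceInfo(
  //     vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
  //   vk::ExternalFenceProperties externalFenceProperties =
  //     physicalDevice.getExternalFenceProperties( externalFenceInfo );
  //   bool exportable = static_cast<bool>( externalFenceProperties.externalFenceFeatures &
  //                                        vk::ExternalFenceFeatureFlagBits::eExportable );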

  struct ImageFormatProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D         maxExtent_       = {},
                             uint32_t                               maxMipLevels_    = {},
                             uint32_t                               maxArrayLayers_  = {},
                             VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_    = {},
                             VULKAN_HPP_NAMESPACE::DeviceSize       maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxExtent( maxExtent_ )
      , maxMipLevels( maxMipLevels_ )
      , maxArrayLayers( maxArrayLayers_ )
      , sampleCounts( sampleCounts_ )
      , maxResourceSize( maxResourceSize_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageFormatProperties( *reinterpret_cast<ImageFormatProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageFormatProperties &
                            operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>( &rhs );
      return *this;
    }

    operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageFormatProperties *>( this );
    }

    operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageFormatProperties const & ) const = default;
#else
    bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) &&
             ( maxArrayLayers == rhs.maxArrayLayers ) && ( sampleCounts == rhs.sampleCounts ) &&
             ( maxResourceSize == rhs.maxResourceSize );
    }

    bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Extent3D         maxExtent       = {};
    uint32_t                               maxMipLevels    = {};
    uint32_t                               maxArrayLayers  = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts    = {};
    VULKAN_HPP_NAMESPACE::DeviceSize       maxResourceSize = {};
  };
  static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );

  struct ExternalImageFormatPropertiesNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(
      VULKAN_HPP_NAMESPACE::ImageFormatProperties           imageFormatProperties_         = {},
      VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV    externalMemoryFeatures_        = {},
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {},
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_         = {} ) VULKAN_HPP_NOEXCEPT
      : imageFormatProperties( imageFormatProperties_ )
      , externalMemoryFeatures( externalMemoryFeatures_ )
      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
      , compatibleHandleTypes( compatibleHandleTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalImageFormatPropertiesNV( *reinterpret_cast<ExternalImageFormatPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalImageFormatPropertiesNV &
                            operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV *>( this );
    }

    operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalImageFormatPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default;
#else
    bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( imageFormatProperties == rhs.imageFormatProperties ) &&
             ( externalMemoryFeatures == rhs.externalMemoryFeatures ) &&
             ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
             ( compatibleHandleTypes == rhs.compatibleHandleTypes );
    }

    bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ImageFormatProperties           imageFormatProperties         = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV    externalMemoryFeatures        = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes         = {};
  };
  static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalImageFormatPropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceExternalSemaphoreInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceExternalSemaphoreInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ =
        VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast<PhysicalDeviceExternalSemaphoreInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo &
                            operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalSemaphoreInfo &
      operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
      return *this;
    }

    PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExternalSemaphoreInfo &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( this );
    }

    operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
    }

    bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalSemaphoreInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
  {
    using Type = PhysicalDeviceExternalSemaphoreInfo;
  };
  using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;

  struct ExternalSemaphoreProperties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {},
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_         = {},
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags    externalSemaphoreFeatures_     = {} ) VULKAN_HPP_NOEXCEPT
      : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
      , compatibleHandleTypes( compatibleHandleTypes_ )
      , externalSemaphoreFeatures( externalSemaphoreFeatures_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalSemaphoreProperties( *reinterpret_cast<ExternalSemaphoreProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalSemaphoreProperties &
                            operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>( &rhs );
      return *this;
    }

    operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalSemaphoreProperties *>( this );
    }

    operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalSemaphoreProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalSemaphoreProperties const & ) const = default;
#else
    bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) &&
             ( compatibleHandleTypes == rhs.compatibleHandleTypes ) &&
             ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
    }

    bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::eExternalSemaphoreProperties;
    void *                                                 pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes         = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags    externalSemaphoreFeatures     = {};
  };
  static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalSemaphoreProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
  {
    using Type = ExternalSemaphoreProperties;
  };
  using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
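
  // Illustrative sketch, not part of the generated API: assuming the default vk namespace,
  // enhanced mode, and a valid vk::PhysicalDevice physicalDevice, external semaphore support
  // for an opaque fd handle could be checked roughly like this:
  //
  //   vk::PhysicalDeviceExternalSemaphoreInfo externalSemaphoreInfo(
  //     vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
  //   vk::ExternalSemaphoreProperties externalSemaphoreProperties =
  //     physicalDevice.getExternalSemaphoreProperties( externalSemaphoreInfo );
  //   bool importable = static_cast<bool>( externalSemaphoreProperties.externalSemaphoreFeatures &
  //                                        vk::ExternalSemaphoreFeatureFlagBits::eImportable );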

  struct PhysicalDeviceFeatures2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT
      : features( features_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFeatures2( *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 &
                            operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>( &rhs );
      return *this;
    }

    PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFeatures2 &
      setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
    {
      features = features_;
      return *this;
    }

    operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFeatures2 *>( this );
    }

    operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFeatures2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default;
#else
    bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features );
    }

    bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType    = StructureType::ePhysicalDeviceFeatures2;
    void *                                       pNext    = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
  };
  static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
  {
    using Type = PhysicalDeviceFeatures2;
  };
  using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
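
  // Illustrative sketch, not part of the generated API: assuming the default vk namespace,
  // enhanced mode, and a valid vk::PhysicalDevice physicalDevice, the extensible feature query
  // (Vulkan 1.1 / VK_KHR_get_physical_device_properties2) could look roughly like this:
  //
  //   vk::PhysicalDeviceFeatures2 features2 = physicalDevice.getFeatures2();
  //   bool hasSamplerAnisotropy = static_cast<bool>( features2.features.samplerAnisotropy );
  //
  //   // additional feature structs can be chained through pNext (or queried via
  //   // getFeatures2<...>() with a StructureChain) before the call.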

  struct FormatProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_  = {},
                        VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {},
                        VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_        = {} ) VULKAN_HPP_NOEXCEPT
      : linearTilingFeatures( linearTilingFeatures_ )
      , optimalTilingFeatures( optimalTilingFeatures_ )
      , bufferFeatures( bufferFeatures_ )
    {}

    VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
      return *this;
    }

    operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFormatProperties *>( this );
    }

    operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FormatProperties const & ) const = default;
#else
    bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( linearTilingFeatures == rhs.linearTilingFeatures ) &&
             ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures );
    }

    bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures  = {};
    VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
    VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures        = {};
  };
  static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!" );
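
  // Illustrative sketch, not part of the generated API: assuming the default vk namespace,
  // enhanced mode, and a valid vk::PhysicalDevice physicalDevice, per-format capabilities
  // could be queried roughly like this:
  //
  //   vk::FormatProperties formatProperties =
  //     physicalDevice.getFormatProperties( vk::Format::eR8G8B8A8Unorm );
  //   bool sampleableOptimal = static_cast<bool>( formatProperties.optimalTilingFeatures &
  //                                               vk::FormatFeatureFlagBits::eSampledImage );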

  struct FormatProperties2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : formatProperties( formatProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FormatProperties2 &
                            operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
      return *this;
    }

    operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFormatProperties2 *>( this );
    }

    operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFormatProperties2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FormatProperties2 const & ) const = default;
#else
    bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties );
    }

    bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType            = StructureType::eFormatProperties2;
    void *                                 pNext            = {};
    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
  };
  static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FormatProperties2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFormatProperties2>
  {
    using Type = FormatProperties2;
  };
  using FormatProperties2KHR = FormatProperties2;
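
  // Illustrative sketch, not part of the generated API: assuming the default vk namespace,
  // enhanced mode, and a valid vk::PhysicalDevice physicalDevice, the extensible variant of the
  // format query (extra properties can be chained through pNext) could look roughly like this:
  //
  //   vk::FormatProperties2 formatProperties2 =
  //     physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm );
  //   vk::FormatFeatureFlags bufferFeatures = formatProperties2.formatProperties.bufferFeatures;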

  struct PhysicalDeviceFragmentShadingRateKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentShadingRateKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
                                            VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : sampleCounts( sampleCounts_ )
      , fragmentSize( fragmentSize_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateKHR &
                            operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateKHR &
      operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) &&
             ( fragmentSize == rhs.fragmentSize );
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType        = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
    void *                                 pNext        = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
    VULKAN_HPP_NAMESPACE::Extent2D         fragmentSize = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRateKHR;
  };
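
  // Illustrative sketch, not part of the generated API: assuming the default vk namespace,
  // enhanced mode with exceptions, the VK_KHR_fragment_shading_rate extension enabled, and a
  // valid vk::PhysicalDevice physicalDevice, the supported rates could be enumerated like this:
  //
  //   std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> shadingRates =
  //     physicalDevice.getFragmentShadingRatesKHR();
  //   for ( auto const & rate : shadingRates )
  //   {
  //     // rate.fragmentSize and rate.sampleCounts describe one supported combination
  //   }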

  struct PhysicalDeviceImageFormatInfo2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(
      VULKAN_HPP_NAMESPACE::Format           format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::ImageType        type_   = VULKAN_HPP_NAMESPACE::ImageType::e1D,
      VULKAN_HPP_NAMESPACE::ImageTiling      tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage_  = {},
      VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_  = {} ) VULKAN_HPP_NOEXCEPT
      : format( format_ )
      , type( type_ )
      , tiling( tiling_ )
      , usage( usage_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageFormatInfo2( *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 &
                            operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>( &rhs );
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( this );
    }

    operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default;
#else
    bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) &&
             ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( flags == rhs.flags );
    }

    bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType  = StructureType::ePhysicalDeviceImageFormatInfo2;
    const void *                           pNext  = {};
    VULKAN_HPP_NAMESPACE::Format           format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageType        type   = VULKAN_HPP_NAMESPACE::ImageType::e1D;
    VULKAN_HPP_NAMESPACE::ImageTiling      tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
    VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage  = {};
    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags  = {};
  };
  static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
  {
    using Type = PhysicalDeviceImageFormatInfo2;
  };
  using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;

  struct ImageFormatProperties2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageFormatProperties2(
      VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : imageFormatProperties( imageFormatProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageFormatProperties2( *reinterpret_cast<ImageFormatProperties2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageFormatProperties2 &
                            operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>( &rhs );
      return *this;
    }

    operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageFormatProperties2 *>( this );
    }

    operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageFormatProperties2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageFormatProperties2 const & ) const = default;
#else
    bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties );
    }

    bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType                 = StructureType::eImageFormatProperties2;
    void *                                      pNext                 = {};
    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
  };
  static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageFormatProperties2>
  {
    using Type = ImageFormatProperties2;
  };
  using ImageFormatProperties2KHR = ImageFormatProperties2;
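
  // Illustrative sketch, not part of the generated API: assuming the default vk namespace,
  // enhanced mode with exceptions, and a valid vk::PhysicalDevice physicalDevice, the two
  // structs above could be combined to check whether an image configuration is supported:
  //
  //   vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo;
  //   imageFormatInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
  //     .setType( vk::ImageType::e2D )
  //     .setTiling( vk::ImageTiling::eOptimal )
  //     .setUsage( vk::ImageUsageFlagBits::eSampled );
  //   vk::ImageFormatProperties2 imageFormatProperties2 =
  //     physicalDevice.getImageFormatProperties2( imageFormatInfo );   // throws on
  //                                                                    // VK_ERROR_FORMAT_NOT_SUPPORTED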

  struct MemoryType
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {},
                                     uint32_t                                  heapIndex_     = {} ) VULKAN_HPP_NOEXCEPT
      : propertyFlags( propertyFlags_ )
      , heapIndex( heapIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
      return *this;
    }

    operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryType *>( this );
    }

    operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryType *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryType const & ) const = default;
#else
    bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex );
    }

    bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
    uint32_t                                  heapIndex     = {};
  };
  static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!" );

  struct MemoryHeap
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize      size_  = {},
                                     VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
      : size( size_ )
      , flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs );
      return *this;
    }

    operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryHeap *>( this );
    }

    operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryHeap *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryHeap const & ) const = default;
#else
    bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( size == rhs.size ) && ( flags == rhs.flags );
    }

    bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::DeviceSize      size  = {};
    VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
  };
  static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceMemoryProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(
      uint32_t                                                                  memoryTypeCount_ = {},
      std::array<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const & memoryTypes_     = {},
      uint32_t                                                                  memoryHeapCount_ = {},
      std::array<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryTypeCount( memoryTypeCount_ )
      , memoryTypes( memoryTypes_ )
      , memoryHeapCount( memoryHeapCount_ )
      , memoryHeaps( memoryHeaps_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMemoryProperties( *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties &
                            operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties *>( this );
    }

    operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == rhs.memoryTypes ) &&
             ( memoryHeapCount == rhs.memoryHeapCount ) && ( memoryHeaps == rhs.memoryHeaps );
    }

    bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                                                                    memoryTypeCount = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes     = {};
    uint32_t                                                                                    memoryHeapCount = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps     = {};
  };
  static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value,
                 "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceMemoryProperties2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(
      VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryProperties( memoryProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2 &
                            operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2 *>( this );
    }

    operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default;
#else
    bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties );
    }

    bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType = StructureType::ePhysicalDeviceMemoryProperties2;
    void *                                               pNext = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
  };
  static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
  {
    using Type = PhysicalDeviceMemoryProperties2;
  };
  using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;

  struct MultisamplePropertiesEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
    {}

    VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MultisamplePropertiesEXT &
                            operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMultisamplePropertiesEXT *>( this );
    }

    operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMultisamplePropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MultisamplePropertiesEXT const & ) const = default;
#else
    bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
    }

    bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                     = StructureType::eMultisamplePropertiesEXT;
    void *                              pNext                     = {};
    VULKAN_HPP_NAMESPACE::Extent2D      maxSampleLocationGridSize = {};
  };
  static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
  {
    using Type = MultisamplePropertiesEXT;
  };
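
  // Illustrative sketch, not part of the generated API: assuming the default vk namespace,
  // enhanced mode, the VK_EXT_sample_locations extension enabled, and a valid
  // vk::PhysicalDevice physicalDevice, this struct could be obtained along these lines:
  //
  //   vk::MultisamplePropertiesEXT multisampleProperties =
  //     physicalDevice.getMultisamplePropertiesEXT( vk::SampleCountFlagBits::e4 );
  //   vk::Extent2D gridSize = multisampleProperties.maxSampleLocationGridSize;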

  struct PhysicalDeviceLimits
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceLimits( uint32_t                         maxImageDimension1D_                             = {},
                            uint32_t                         maxImageDimension2D_                             = {},
                            uint32_t                         maxImageDimension3D_                             = {},
                            uint32_t                         maxImageDimensionCube_                           = {},
                            uint32_t                         maxImageArrayLayers_                             = {},
                            uint32_t                         maxTexelBufferElements_                          = {},
                            uint32_t                         maxUniformBufferRange_                           = {},
                            uint32_t                         maxStorageBufferRange_                           = {},
                            uint32_t                         maxPushConstantsSize_                            = {},
                            uint32_t                         maxMemoryAllocationCount_                        = {},
                            uint32_t                         maxSamplerAllocationCount_                       = {},
                            VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_                          = {},
                            VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_                          = {},
                            uint32_t                         maxBoundDescriptorSets_                          = {},
                            uint32_t                         maxPerStageDescriptorSamplers_                   = {},
                            uint32_t                         maxPerStageDescriptorUniformBuffers_             = {},
                            uint32_t                         maxPerStageDescriptorStorageBuffers_             = {},
                            uint32_t                         maxPerStageDescriptorSampledImages_              = {},
                            uint32_t                         maxPerStageDescriptorStorageImages_              = {},
                            uint32_t                         maxPerStageDescriptorInputAttachments_           = {},
                            uint32_t                         maxPerStageResources_                            = {},
                            uint32_t                         maxDescriptorSetSamplers_                        = {},
                            uint32_t                         maxDescriptorSetUniformBuffers_                  = {},
                            uint32_t                         maxDescriptorSetUniformBuffersDynamic_           = {},
                            uint32_t                         maxDescriptorSetStorageBuffers_                  = {},
                            uint32_t                         maxDescriptorSetStorageBuffersDynamic_           = {},
                            uint32_t                         maxDescriptorSetSampledImages_                   = {},
                            uint32_t                         maxDescriptorSetStorageImages_                   = {},
                            uint32_t                         maxDescriptorSetInputAttachments_                = {},
                            uint32_t                         maxVertexInputAttributes_                        = {},
                            uint32_t                         maxVertexInputBindings_                          = {},
                            uint32_t                         maxVertexInputAttributeOffset_                   = {},
                            uint32_t                         maxVertexInputBindingStride_                     = {},
                            uint32_t                         maxVertexOutputComponents_                       = {},
                            uint32_t                         maxTessellationGenerationLevel_                  = {},
                            uint32_t                         maxTessellationPatchSize_                        = {},
                            uint32_t                         maxTessellationControlPerVertexInputComponents_  = {},
                            uint32_t                         maxTessellationControlPerVertexOutputComponents_ = {},
                            uint32_t                         maxTessellationControlPerPatchOutputComponents_  = {},
                            uint32_t                         maxTessellationControlTotalOutputComponents_     = {},
                            uint32_t                         maxTessellationEvaluationInputComponents_        = {},
                            uint32_t                         maxTessellationEvaluationOutputComponents_       = {},
                            uint32_t                         maxGeometryShaderInvocations_                    = {},
                            uint32_t                         maxGeometryInputComponents_                      = {},
                            uint32_t                         maxGeometryOutputComponents_                     = {},
                            uint32_t                         maxGeometryOutputVertices_                       = {},
                            uint32_t                         maxGeometryTotalOutputComponents_                = {},
                            uint32_t                         maxFragmentInputComponents_                      = {},
                            uint32_t                         maxFragmentOutputAttachments_                    = {},
                            uint32_t                         maxFragmentDualSrcAttachments_                   = {},
                            uint32_t                         maxFragmentCombinedOutputResources_              = {},
                            uint32_t                         maxComputeSharedMemorySize_                      = {},
                            std::array<uint32_t, 3> const &  maxComputeWorkGroupCount_                        = {},
                            uint32_t                         maxComputeWorkGroupInvocations_                  = {},
                            std::array<uint32_t, 3> const &  maxComputeWorkGroupSize_                         = {},
                            uint32_t                         subPixelPrecisionBits_                           = {},
                            uint32_t                         subTexelPrecisionBits_                           = {},
                            uint32_t                         mipmapPrecisionBits_                             = {},
                            uint32_t                         maxDrawIndexedIndexValue_                        = {},
                            uint32_t                         maxDrawIndirectCount_                            = {},
                            float                            maxSamplerLodBias_                               = {},
                            float                            maxSamplerAnisotropy_                            = {},
                            uint32_t                         maxViewports_                                    = {},
                            std::array<uint32_t, 2> const &  maxViewportDimensions_                           = {},
                            std::array<float, 2> const &     viewportBoundsRange_                             = {},
                            uint32_t                         viewportSubPixelBits_                            = {},
                            size_t                           minMemoryMapAlignment_                           = {},
                            VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_                   = {},
                            VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_                 = {},
                            VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_                 = {},
                            int32_t                          minTexelOffset_                                  = {},
                            uint32_t                         maxTexelOffset_                                  = {},
                            int32_t                          minTexelGatherOffset_                            = {},
                            uint32_t                         maxTexelGatherOffset_                            = {},
                            float                            minInterpolationOffset_                          = {},
                            float                            maxInterpolationOffset_                          = {},
                            uint32_t                         subPixelInterpolationOffsetBits_                 = {},
                            uint32_t                         maxFramebufferWidth_                             = {},
                            uint32_t                         maxFramebufferHeight_                            = {},
                            uint32_t                         maxFramebufferLayers_                            = {},
                            VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_              = {},
                            VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_              = {},
                            VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_            = {},
                            VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_      = {},
                            uint32_t                               maxColorAttachments_                       = {},
                            VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_             = {},
                            VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_           = {},
                            VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_             = {},
                            VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_           = {},
                            VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_                  = {},
                            uint32_t                               maxSampleMaskWords_                        = {},
                            VULKAN_HPP_NAMESPACE::Bool32           timestampComputeAndGraphics_               = {},
                            float                                  timestampPeriod_                           = {},
                            uint32_t                               maxClipDistances_                          = {},
                            uint32_t                               maxCullDistances_                          = {},
                            uint32_t                               maxCombinedClipAndCullDistances_           = {},
                            uint32_t                               discreteQueuePriorities_                   = {},
                            std::array<float, 2> const &           pointSizeRange_                            = {},
                            std::array<float, 2> const &           lineWidthRange_                            = {},
                            float                                  pointSizeGranularity_                      = {},
                            float                                  lineWidthGranularity_                      = {},
                            VULKAN_HPP_NAMESPACE::Bool32           strictLines_                               = {},
                            VULKAN_HPP_NAMESPACE::Bool32           standardSampleLocations_                   = {},
                            VULKAN_HPP_NAMESPACE::DeviceSize       optimalBufferCopyOffsetAlignment_          = {},
                            VULKAN_HPP_NAMESPACE::DeviceSize       optimalBufferCopyRowPitchAlignment_        = {},
                            VULKAN_HPP_NAMESPACE::DeviceSize       nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxImageDimension1D( maxImageDimension1D_ )
      , maxImageDimension2D( maxImageDimension2D_ )
      , maxImageDimension3D( maxImageDimension3D_ )
      , maxImageDimensionCube( maxImageDimensionCube_ )
      , maxImageArrayLayers( maxImageArrayLayers_ )
      , maxTexelBufferElements( maxTexelBufferElements_ )
      , maxUniformBufferRange( maxUniformBufferRange_ )
      , maxStorageBufferRange( maxStorageBufferRange_ )
      , maxPushConstantsSize( maxPushConstantsSize_ )
      , maxMemoryAllocationCount( maxMemoryAllocationCount_ )
      , maxSamplerAllocationCount( maxSamplerAllocationCount_ )
      , bufferImageGranularity( bufferImageGranularity_ )
      , sparseAddressSpaceSize( sparseAddressSpaceSize_ )
      , maxBoundDescriptorSets( maxBoundDescriptorSets_ )
      , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ )
      , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ )
      , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ )
      , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ )
      , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ )
      , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ )
      , maxPerStageResources( maxPerStageResources_ )
      , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ )
      , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ )
      , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ )
      , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ )
      , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ )
      , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ )
      , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ )
      , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ )
      , maxVertexInputAttributes( maxVertexInputAttributes_ )
      , maxVertexInputBindings( maxVertexInputBindings_ )
      , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ )
      , maxVertexInputBindingStride( maxVertexInputBindingStride_ )
      , maxVertexOutputComponents( maxVertexOutputComponents_ )
      , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ )
      , maxTessellationPatchSize( maxTessellationPatchSize_ )
      , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ )
      , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ )
      , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ )
      , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ )
      , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ )
      , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ )
      , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ )
      , maxGeometryInputComponents( maxGeometryInputComponents_ )
      , maxGeometryOutputComponents( maxGeometryOutputComponents_ )
      , maxGeometryOutputVertices( maxGeometryOutputVertices_ )
      , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ )
      , maxFragmentInputComponents( maxFragmentInputComponents_ )
      , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ )
      , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ )
      , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ )
      , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ )
      , maxComputeWorkGroupCount( maxComputeWorkGroupCount_ )
      , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ )
      , maxComputeWorkGroupSize( maxComputeWorkGroupSize_ )
      , subPixelPrecisionBits( subPixelPrecisionBits_ )
      , subTexelPrecisionBits( subTexelPrecisionBits_ )
      , mipmapPrecisionBits( mipmapPrecisionBits_ )
      , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ )
      , maxDrawIndirectCount( maxDrawIndirectCount_ )
      , maxSamplerLodBias( maxSamplerLodBias_ )
      , maxSamplerAnisotropy( maxSamplerAnisotropy_ )
      , maxViewports( maxViewports_ )
      , maxViewportDimensions( maxViewportDimensions_ )
      , viewportBoundsRange( viewportBoundsRange_ )
      , viewportSubPixelBits( viewportSubPixelBits_ )
      , minMemoryMapAlignment( minMemoryMapAlignment_ )
      , minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ )
      , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ )
      , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ )
      , minTexelOffset( minTexelOffset_ )
      , maxTexelOffset( maxTexelOffset_ )
      , minTexelGatherOffset( minTexelGatherOffset_ )
      , maxTexelGatherOffset( maxTexelGatherOffset_ )
      , minInterpolationOffset( minInterpolationOffset_ )
      , maxInterpolationOffset( maxInterpolationOffset_ )
      , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ )
      , maxFramebufferWidth( maxFramebufferWidth_ )
      , maxFramebufferHeight( maxFramebufferHeight_ )
      , maxFramebufferLayers( maxFramebufferLayers_ )
      , framebufferColorSampleCounts( framebufferColorSampleCounts_ )
      , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ )
      , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ )
      , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ )
      , maxColorAttachments( maxColorAttachments_ )
      , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ )
      , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ )
      , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ )
      , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ )
      , storageImageSampleCounts( storageImageSampleCounts_ )
      , maxSampleMaskWords( maxSampleMaskWords_ )
      , timestampComputeAndGraphics( timestampComputeAndGraphics_ )
      , timestampPeriod( timestampPeriod_ )
      , maxClipDistances( maxClipDistances_ )
      , maxCullDistances( maxCullDistances_ )
      , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ )
      , discreteQueuePriorities( discreteQueuePriorities_ )
      , pointSizeRange( pointSizeRange_ )
      , lineWidthRange( lineWidthRange_ )
      , pointSizeGranularity( pointSizeGranularity_ )
      , lineWidthGranularity( lineWidthGranularity_ )
      , strictLines( strictLines_ )
      , standardSampleLocations( standardSampleLocations_ )
      , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ )
      , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ )
      , nonCoherentAtomSize( nonCoherentAtomSize_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLimits( *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits &
                            operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLimits *>( this );
    }

    operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLimits *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLimits const & ) const = default;
#else
    bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) &&
             ( maxImageDimension3D == rhs.maxImageDimension3D ) &&
             ( maxImageDimensionCube == rhs.maxImageDimensionCube ) &&
             ( maxImageArrayLayers == rhs.maxImageArrayLayers ) &&
             ( maxTexelBufferElements == rhs.maxTexelBufferElements ) &&
             ( maxUniformBufferRange == rhs.maxUniformBufferRange ) &&
             ( maxStorageBufferRange == rhs.maxStorageBufferRange ) &&
             ( maxPushConstantsSize == rhs.maxPushConstantsSize ) &&
             ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) &&
             ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) &&
             ( bufferImageGranularity == rhs.bufferImageGranularity ) &&
             ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) &&
             ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) &&
             ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) &&
             ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) &&
             ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) &&
             ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) &&
             ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) &&
             ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) &&
             ( maxPerStageResources == rhs.maxPerStageResources ) &&
             ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) &&
             ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) &&
             ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) &&
             ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) &&
             ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) &&
             ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) &&
             ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) &&
             ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) &&
             ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) &&
             ( maxVertexInputBindings == rhs.maxVertexInputBindings ) &&
             ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) &&
             ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) &&
             ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) &&
             ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) &&
             ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) &&
             ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) &&
             ( maxTessellationControlPerVertexOutputComponents ==
               rhs.maxTessellationControlPerVertexOutputComponents ) &&
             ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) &&
             ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) &&
             ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) &&
             ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) &&
             ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) &&
             ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) &&
             ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) &&
             ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) &&
             ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) &&
             ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) &&
             ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) &&
             ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) &&
             ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) &&
             ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) &&
             ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) &&
             ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) &&
             ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) &&
             ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) &&
             ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) &&
             ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) &&
             ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) &&
             ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) &&
             ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) &&
             ( maxViewportDimensions == rhs.maxViewportDimensions ) &&
             ( viewportBoundsRange == rhs.viewportBoundsRange ) &&
             ( viewportSubPixelBits == rhs.viewportSubPixelBits ) &&
             ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) &&
             ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) &&
             ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) &&
             ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) &&
             ( minTexelOffset == rhs.minTexelOffset ) && ( maxTexelOffset == rhs.maxTexelOffset ) &&
             ( minTexelGatherOffset == rhs.minTexelGatherOffset ) &&
             ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) &&
             ( minInterpolationOffset == rhs.minInterpolationOffset ) &&
             ( maxInterpolationOffset == rhs.maxInterpolationOffset ) &&
             ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) &&
             ( maxFramebufferWidth == rhs.maxFramebufferWidth ) &&
             ( maxFramebufferHeight == rhs.maxFramebufferHeight ) &&
             ( maxFramebufferLayers == rhs.maxFramebufferLayers ) &&
             ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) &&
             ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) &&
             ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) &&
             ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) &&
             ( maxColorAttachments == rhs.maxColorAttachments ) &&
             ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) &&
             ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) &&
             ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) &&
             ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) &&
             ( storageImageSampleCounts == rhs.storageImageSampleCounts ) &&
             ( maxSampleMaskWords == rhs.maxSampleMaskWords ) &&
             ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) &&
             ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) &&
             ( maxCullDistances == rhs.maxCullDistances ) &&
             ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) &&
             ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && ( pointSizeRange == rhs.pointSizeRange ) &&
             ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) &&
             ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) &&
             ( standardSampleLocations == rhs.standardSampleLocations ) &&
             ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) &&
             ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) &&
             ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
    }

    bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                          maxImageDimension1D                             = {};
    uint32_t                                          maxImageDimension2D                             = {};
    uint32_t                                          maxImageDimension3D                             = {};
    uint32_t                                          maxImageDimensionCube                           = {};
    uint32_t                                          maxImageArrayLayers                             = {};
    uint32_t                                          maxTexelBufferElements                          = {};
    uint32_t                                          maxUniformBufferRange                           = {};
    uint32_t                                          maxStorageBufferRange                           = {};
    uint32_t                                          maxPushConstantsSize                            = {};
    uint32_t                                          maxMemoryAllocationCount                        = {};
    uint32_t                                          maxSamplerAllocationCount                       = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  bufferImageGranularity                          = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  sparseAddressSpaceSize                          = {};
    uint32_t                                          maxBoundDescriptorSets                          = {};
    uint32_t                                          maxPerStageDescriptorSamplers                   = {};
    uint32_t                                          maxPerStageDescriptorUniformBuffers             = {};
    uint32_t                                          maxPerStageDescriptorStorageBuffers             = {};
    uint32_t                                          maxPerStageDescriptorSampledImages              = {};
    uint32_t                                          maxPerStageDescriptorStorageImages              = {};
    uint32_t                                          maxPerStageDescriptorInputAttachments           = {};
    uint32_t                                          maxPerStageResources                            = {};
    uint32_t                                          maxDescriptorSetSamplers                        = {};
    uint32_t                                          maxDescriptorSetUniformBuffers                  = {};
    uint32_t                                          maxDescriptorSetUniformBuffersDynamic           = {};
    uint32_t                                          maxDescriptorSetStorageBuffers                  = {};
    uint32_t                                          maxDescriptorSetStorageBuffersDynamic           = {};
    uint32_t                                          maxDescriptorSetSampledImages                   = {};
    uint32_t                                          maxDescriptorSetStorageImages                   = {};
    uint32_t                                          maxDescriptorSetInputAttachments                = {};
    uint32_t                                          maxVertexInputAttributes                        = {};
    uint32_t                                          maxVertexInputBindings                          = {};
    uint32_t                                          maxVertexInputAttributeOffset                   = {};
    uint32_t                                          maxVertexInputBindingStride                     = {};
    uint32_t                                          maxVertexOutputComponents                       = {};
    uint32_t                                          maxTessellationGenerationLevel                  = {};
    uint32_t                                          maxTessellationPatchSize                        = {};
    uint32_t                                          maxTessellationControlPerVertexInputComponents  = {};
    uint32_t                                          maxTessellationControlPerVertexOutputComponents = {};
    uint32_t                                          maxTessellationControlPerPatchOutputComponents  = {};
    uint32_t                                          maxTessellationControlTotalOutputComponents     = {};
    uint32_t                                          maxTessellationEvaluationInputComponents        = {};
    uint32_t                                          maxTessellationEvaluationOutputComponents       = {};
    uint32_t                                          maxGeometryShaderInvocations                    = {};
    uint32_t                                          maxGeometryInputComponents                      = {};
    uint32_t                                          maxGeometryOutputComponents                     = {};
    uint32_t                                          maxGeometryOutputVertices                       = {};
    uint32_t                                          maxGeometryTotalOutputComponents                = {};
    uint32_t                                          maxFragmentInputComponents                      = {};
    uint32_t                                          maxFragmentOutputAttachments                    = {};
    uint32_t                                          maxFragmentDualSrcAttachments                   = {};
    uint32_t                                          maxFragmentCombinedOutputResources              = {};
    uint32_t                                          maxComputeSharedMemorySize                      = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount                        = {};
    uint32_t                                          maxComputeWorkGroupInvocations                  = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize                         = {};
    uint32_t                                          subPixelPrecisionBits                           = {};
    uint32_t                                          subTexelPrecisionBits                           = {};
    uint32_t                                          mipmapPrecisionBits                             = {};
    uint32_t                                          maxDrawIndexedIndexValue                        = {};
    uint32_t                                          maxDrawIndirectCount                            = {};
    float                                             maxSamplerLodBias                               = {};
    float                                             maxSamplerAnisotropy                            = {};
    uint32_t                                          maxViewports                                    = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> maxViewportDimensions                           = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2>    viewportBoundsRange                             = {};
    uint32_t                                          viewportSubPixelBits                            = {};
    size_t                                            minMemoryMapAlignment                           = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  minTexelBufferOffsetAlignment                   = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  minUniformBufferOffsetAlignment                 = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  minStorageBufferOffsetAlignment                 = {};
    int32_t                                           minTexelOffset                                  = {};
    uint32_t                                          maxTexelOffset                                  = {};
    int32_t                                           minTexelGatherOffset                            = {};
    uint32_t                                          maxTexelGatherOffset                            = {};
    float                                             minInterpolationOffset                          = {};
    float                                             maxInterpolationOffset                          = {};
    uint32_t                                          subPixelInterpolationOffsetBits                 = {};
    uint32_t                                          maxFramebufferWidth                             = {};
    uint32_t                                          maxFramebufferHeight                            = {};
    uint32_t                                          maxFramebufferLayers                            = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags            framebufferColorSampleCounts                    = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags            framebufferDepthSampleCounts                    = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags            framebufferStencilSampleCounts                  = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags            framebufferNoAttachmentsSampleCounts            = {};
    uint32_t                                          maxColorAttachments                             = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags            sampledImageColorSampleCounts                   = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags            sampledImageIntegerSampleCounts                 = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags            sampledImageDepthSampleCounts                   = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags            sampledImageStencilSampleCounts                 = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags            storageImageSampleCounts                        = {};
    uint32_t                                          maxSampleMaskWords                              = {};
    VULKAN_HPP_NAMESPACE::Bool32                      timestampComputeAndGraphics                     = {};
    float                                             timestampPeriod                                 = {};
    uint32_t                                          maxClipDistances                                = {};
    uint32_t                                          maxCullDistances                                = {};
    uint32_t                                          maxCombinedClipAndCullDistances                 = {};
    uint32_t                                          discreteQueuePriorities                         = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2>    pointSizeRange                                  = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2>    lineWidthRange                                  = {};
    float                                             pointSizeGranularity                            = {};
    float                                             lineWidthGranularity                            = {};
    VULKAN_HPP_NAMESPACE::Bool32                      strictLines                                     = {};
    VULKAN_HPP_NAMESPACE::Bool32                      standardSampleLocations                         = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  optimalBufferCopyOffsetAlignment                = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  optimalBufferCopyRowPitchAlignment              = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                  nonCoherentAtomSize                             = {};
  };
  static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
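
  // Usage sketch (illustrative comment only; assumes the default configuration with enhanced mode
  // enabled and a valid vk::PhysicalDevice named "physicalDevice"): the limits are normally read
  // through getProperties() and checked before choosing a resource strategy.
  //
  //   vk::PhysicalDeviceProperties props       = physicalDevice.getProperties();
  //   vk::PhysicalDeviceLimits const & limits  = props.limits;
  //   if ( limits.maxPushConstantsSize < 128 )
  //   {
  //     // fall back to a uniform buffer for per-draw data
  //   }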

  struct PhysicalDeviceSparseProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(
      VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_            = {},
      VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_            = {},
      VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_               = {} ) VULKAN_HPP_NOEXCEPT
      : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
      , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
      , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
      , residencyAlignedMipSize( residencyAlignedMipSize_ )
      , residencyNonResidentStrict( residencyNonResidentStrict_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseProperties &
                            operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties *>( this );
    }

    operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSparseProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) &&
             ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) &&
             ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) &&
             ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) &&
             ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
    }

    bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape            = {};
    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape            = {};
    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize                  = {};
    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict               = {};
  };
  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value,
                 "struct wrapper is not a standard layout!" );

  struct PhysicalDeviceProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(
      uint32_t                                 apiVersion_    = {},
      uint32_t                                 driverVersion_ = {},
      uint32_t                                 vendorID_      = {},
      uint32_t                                 deviceID_      = {},
      VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_    = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
      std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const & deviceName_        = {},
      std::array<uint8_t, VK_UUID_SIZE> const &                  pipelineCacheUUID_ = {},
      VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits                 limits_            = {},
      VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties       sparseProperties_  = {} ) VULKAN_HPP_NOEXCEPT
      : apiVersion( apiVersion_ )
      , driverVersion( driverVersion_ )
      , vendorID( vendorID_ )
      , deviceID( deviceID_ )
      , deviceType( deviceType_ )
      , deviceName( deviceName_ )
      , pipelineCacheUUID( pipelineCacheUUID_ )
      , limits( limits_ )
      , sparseProperties( sparseProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties &
                            operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProperties *>( this );
    }

    operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) &&
             ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( deviceType == rhs.deviceType ) &&
             ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) &&
             ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties );
    }

    bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                 apiVersion    = {};
    uint32_t                                 driverVersion = {};
    uint32_t                                 vendorID      = {};
    uint32_t                                 deviceID      = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType    = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName        = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE>                  pipelineCacheUUID = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits                                   limits            = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties                         sparseProperties  = {};
  };
  static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
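
  // Usage sketch (illustrative comment only; assumes a valid vk::PhysicalDevice named "physicalDevice"):
  //
  //   vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
  //   uint32_t major = VK_VERSION_MAJOR( props.apiVersion );
  //   uint32_t minor = VK_VERSION_MINOR( props.apiVersion );
  //   bool discrete  = ( props.deviceType == vk::PhysicalDeviceType::eDiscreteGpu );
  //   std::string name( props.deviceName.data() );  // deviceName wraps a null-terminated char array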

  struct PhysicalDeviceProperties2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
      : properties( properties_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2 &
                            operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProperties2 *>( this );
    }

    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProperties2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProperties2 const & ) const = default;
#else
    bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
    }

    bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType      = StructureType::ePhysicalDeviceProperties2;
    void *                                         pNext      = {};
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
  };
  static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
  {
    using Type = PhysicalDeviceProperties2;
  };
  using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
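
  // Usage sketch (illustrative comment only): extended properties are queried by chaining structures
  // through pNext, e.g. via the StructureChain overload of getProperties2():
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDriverProperties>();
  //   vk::PhysicalDeviceProperties const & props  = chain.get<vk::PhysicalDeviceProperties2>().properties;
  //   vk::PhysicalDeviceDriverProperties & driver = chain.get<vk::PhysicalDeviceDriverProperties>();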

  struct QueryPoolPerformanceCreateInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eQueryPoolPerformanceCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t         queueFamilyIndex_  = {},
                                                            uint32_t         counterIndexCount_ = {},
                                                            const uint32_t * pCounterIndices_ = {} ) VULKAN_HPP_NOEXCEPT
      : queueFamilyIndex( queueFamilyIndex_ )
      , counterIndexCount( counterIndexCount_ )
      , pCounterIndices( pCounterIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR
      QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    QueryPoolPerformanceCreateInfoKHR(
      uint32_t                                                              queueFamilyIndex_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ )
      : queueFamilyIndex( queueFamilyIndex_ )
      , counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) )
      , pCounterIndices( counterIndices_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR &
                            operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
      return *this;
    }

    QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndex = queueFamilyIndex_;
      return *this;
    }

    QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      counterIndexCount = counterIndexCount_;
      return *this;
    }

    QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pCounterIndices = pCounterIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    QueryPoolPerformanceCreateInfoKHR & setCounterIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
      pCounterIndices   = counterIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( this );
    }

    operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default;
#else
    bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) &&
             ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices );
    }

    bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::eQueryPoolPerformanceCreateInfoKHR;
    const void *                        pNext             = {};
    uint32_t                            queueFamilyIndex  = {};
    uint32_t                            counterIndexCount = {};
    const uint32_t *                    pCounterIndices   = {};
  };
  static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<QueryPoolPerformanceCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
  {
    using Type = QueryPoolPerformanceCreateInfoKHR;
  };
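
  // Usage sketch (illustrative comment only; requires VK_KHR_performance_query and a valid vk::Device
  // named "device"; "queueFamilyIndex" and "counterIndices" are assumed to come from
  // vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR):
  //
  //   vk::QueryPoolPerformanceCreateInfoKHR performanceInfo( queueFamilyIndex, counterIndices );
  //   vk::QueryPoolCreateInfo poolInfo( {}, vk::QueryType::ePerformanceQueryKHR, 1 );
  //   poolInfo.pNext = &performanceInfo;
  //   vk::QueryPool queryPool = device.createQueryPool( poolInfo );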

  struct QueueFamilyProperties
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_                  = {},
                             uint32_t                         queueCount_                  = {},
                             uint32_t                         timestampValidBits_          = {},
                             VULKAN_HPP_NAMESPACE::Extent3D   minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
      : queueFlags( queueFlags_ )
      , queueCount( queueCount_ )
      , timestampValidBits( timestampValidBits_ )
      , minImageTransferGranularity( minImageTransferGranularity_ )
    {}

    VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyProperties( *reinterpret_cast<QueueFamilyProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyProperties &
                            operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>( &rhs );
      return *this;
    }

    operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyProperties *>( this );
    }

    operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyProperties const & ) const = default;
#else
    bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) &&
             ( timestampValidBits == rhs.timestampValidBits ) &&
             ( minImageTransferGranularity == rhs.minImageTransferGranularity );
    }

    bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::QueueFlags queueFlags                  = {};
    uint32_t                         queueCount                  = {};
    uint32_t                         timestampValidBits          = {};
    VULKAN_HPP_NAMESPACE::Extent3D   minImageTransferGranularity = {};
  };
  static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
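
  // Usage sketch (illustrative comment only; assumes a valid vk::PhysicalDevice named "physicalDevice"):
  // locating a queue family that supports graphics work.
  //
  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  //   uint32_t graphicsFamily = 0;
  //   for ( uint32_t i = 0; i < families.size(); ++i )
  //   {
  //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       graphicsFamily = i;
  //       break;
  //     }
  //   }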

  struct QueueFamilyProperties2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(
      VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : queueFamilyProperties( queueFamilyProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyProperties2 &
                            operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
      return *this;
    }

    operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyProperties2 *>( this );
    }

    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyProperties2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyProperties2 const & ) const = default;
#else
    bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties );
    }

    bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType                 = StructureType::eQueueFamilyProperties2;
    void *                                      pNext                 = {};
    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
  };
  static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
  {
    using Type = QueueFamilyProperties2;
  };
  using QueueFamilyProperties2KHR = QueueFamilyProperties2;
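
  // Usage sketch (illustrative comment only): the "2" variant allows extension structures to be
  // chained per queue family; the plain overload mirrors getQueueFamilyProperties():
  //
  //   std::vector<vk::QueueFamilyProperties2> families2 = physicalDevice.getQueueFamilyProperties2();
  //   vk::QueueFlags flags = families2[0].queueFamilyProperties.queueFlags;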

  struct PhysicalDeviceSparseImageFormatInfo2
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceSparseImageFormatInfo2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(
      VULKAN_HPP_NAMESPACE::Format              format_  = VULKAN_HPP_NAMESPACE::Format::eUndefined,
      VULKAN_HPP_NAMESPACE::ImageType           type_    = VULKAN_HPP_NAMESPACE::ImageType::e1D,
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags     usage_   = {},
      VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT
      : format( format_ )
      , type( type_ )
      , samples( samples_ )
      , usage( usage_ )
      , tiling( tiling_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast<PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 &
                            operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSparseImageFormatInfo2 &
      operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
    {
      type = type_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 &
      setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
    {
      samples = samples_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
    {
      tiling = tiling_;
      return *this;
    }

    operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
    }

    operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default;
#else
    bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) &&
             ( samples == rhs.samples ) && ( usage == rhs.usage ) && ( tiling == rhs.tiling );
    }

    bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType   = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
    const void *                              pNext   = {};
    VULKAN_HPP_NAMESPACE::Format              format  = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ImageType           type    = VULKAN_HPP_NAMESPACE::ImageType::e1D;
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::ImageUsageFlags     usage   = {};
    VULKAN_HPP_NAMESPACE::ImageTiling         tiling  = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
  };
  static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSparseImageFormatInfo2>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
  {
    using Type = PhysicalDeviceSparseImageFormatInfo2;
  };
  using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
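
  // Illustrative usage sketch, not part of the generated API: assuming the default `vk`
  // namespace and a valid vk::PhysicalDevice `physicalDevice`, the chained setters above can
  // populate the query input before calling the enhanced-mode getSparseImageFormatProperties2
  // overload declared later in this header:
  //   vk::PhysicalDeviceSparseImageFormatInfo2 info;
  //   info.setFormat( vk::Format::eR8G8B8A8Unorm )
  //       .setType( vk::ImageType::e2D )
  //       .setSamples( vk::SampleCountFlagBits::e1 )
  //       .setUsage( vk::ImageUsageFlagBits::eSampled )
  //       .setTiling( vk::ImageTiling::eOptimal );
  //   std::vector<vk::SparseImageFormatProperties2> properties =
  //     physicalDevice.getSparseImageFormatProperties2( info );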

  struct SparseImageFormatProperties2
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(
      VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
      : properties( properties_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
      : SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SparseImageFormatProperties2 &
                            operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
      return *this;
    }

    operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSparseImageFormatProperties2 *>( this );
    }

    operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSparseImageFormatProperties2 *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SparseImageFormatProperties2 const & ) const = default;
#else
    bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties );
    }

    bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType      = StructureType::eSparseImageFormatProperties2;
    void *                                            pNext      = {};
    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
  };
  static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
  {
    using Type = SparseImageFormatProperties2;
  };
  using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;

  struct FramebufferMixedSamplesCombinationNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eFramebufferMixedSamplesCombinationNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(
      VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ =
        VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge,
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
      VULKAN_HPP_NAMESPACE::SampleCountFlags    depthStencilSamples_  = {},
      VULKAN_HPP_NAMESPACE::SampleCountFlags    colorSamples_         = {} ) VULKAN_HPP_NOEXCEPT
      : coverageReductionMode( coverageReductionMode_ )
      , rasterizationSamples( rasterizationSamples_ )
      , depthStencilSamples( depthStencilSamples_ )
      , colorSamples( colorSamples_ )
    {}

    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : FramebufferMixedSamplesCombinationNV( *reinterpret_cast<FramebufferMixedSamplesCombinationNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FramebufferMixedSamplesCombinationNV &
                            operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FramebufferMixedSamplesCombinationNV &
      operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>( &rhs );
      return *this;
    }

    operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV *>( this );
    }

    operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default;
#else
    bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( coverageReductionMode == rhs.coverageReductionMode ) &&
             ( rasterizationSamples == rhs.rasterizationSamples ) &&
             ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples );
    }

    bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType           sType = StructureType::eFramebufferMixedSamplesCombinationNV;
    void *                                        pNext = {};
    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode =
      VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::SampleCountFlags    depthStencilSamples  = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags    colorSamples         = {};
  };
  static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FramebufferMixedSamplesCombinationNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
  {
    using Type = FramebufferMixedSamplesCombinationNV;
  };
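
  // Illustrative usage sketch, not part of the generated API: assuming the
  // VK_NV_coverage_reduction_mode extension is enabled and a valid vk::PhysicalDevice
  // `physicalDevice`, the supported combinations could be enumerated with the enhanced-mode
  // overload declared later in this header:
  //   std::vector<vk::FramebufferMixedSamplesCombinationNV> combinations =
  //     physicalDevice.getSupportedFramebufferMixedSamplesCombinationsNV();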

  struct SurfaceCapabilities2EXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(
      uint32_t                                          minImageCount_       = {},
      uint32_t                                          maxImageCount_       = {},
      VULKAN_HPP_NAMESPACE::Extent2D                    currentExtent_       = {},
      VULKAN_HPP_NAMESPACE::Extent2D                    minImageExtent_      = {},
      VULKAN_HPP_NAMESPACE::Extent2D                    maxImageExtent_      = {},
      uint32_t                                          maxImageArrayLayers_ = {},
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR    supportedTransforms_ = {},
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ =
        VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
      VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_  = {},
      VULKAN_HPP_NAMESPACE::ImageUsageFlags        supportedUsageFlags_      = {},
      VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
      : minImageCount( minImageCount_ )
      , maxImageCount( maxImageCount_ )
      , currentExtent( currentExtent_ )
      , minImageExtent( minImageExtent_ )
      , maxImageExtent( maxImageExtent_ )
      , maxImageArrayLayers( maxImageArrayLayers_ )
      , supportedTransforms( supportedTransforms_ )
      , currentTransform( currentTransform_ )
      , supportedCompositeAlpha( supportedCompositeAlpha_ )
      , supportedUsageFlags( supportedUsageFlags_ )
      , supportedSurfaceCounters( supportedSurfaceCounters_ )
    {}

    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilities2EXT( *reinterpret_cast<SurfaceCapabilities2EXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilities2EXT &
                            operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>( &rhs );
      return *this;
    }

    operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilities2EXT *>( this );
    }

    operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilities2EXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilities2EXT const & ) const = default;
#else
    bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) &&
             ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) &&
             ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) &&
             ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) &&
             ( currentTransform == rhs.currentTransform ) &&
             ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) &&
             ( supportedUsageFlags == rhs.supportedUsageFlags ) &&
             ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
    }

    bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType               = StructureType::eSurfaceCapabilities2EXT;
    void *                                            pNext               = {};
    uint32_t                                          minImageCount       = {};
    uint32_t                                          maxImageCount       = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    currentExtent       = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    minImageExtent      = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    maxImageExtent      = {};
    uint32_t                                          maxImageArrayLayers = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR    supportedTransforms = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform =
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha  = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags        supportedUsageFlags      = {};
    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
  };
  static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
  {
    using Type = SurfaceCapabilities2EXT;
  };
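
  // Illustrative usage sketch, not part of the generated API: assuming the
  // VK_EXT_display_surface_counter extension is enabled, a valid vk::PhysicalDevice
  // `physicalDevice`, and a vk::SurfaceKHR `surface`, the extended capabilities could be
  // queried with the enhanced-mode overload declared later in this header:
  //   vk::SurfaceCapabilities2EXT caps = physicalDevice.getSurfaceCapabilities2EXT( surface );
  //   bool hasVBlankCounter =
  //     static_cast<bool>( caps.supportedSurfaceCounters & vk::SurfaceCounterFlagBitsEXT::eVblank );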

  struct SurfaceCapabilitiesKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SurfaceCapabilitiesKHR( uint32_t                                          minImageCount_       = {},
                              uint32_t                                          maxImageCount_       = {},
                              VULKAN_HPP_NAMESPACE::Extent2D                    currentExtent_       = {},
                              VULKAN_HPP_NAMESPACE::Extent2D                    minImageExtent_      = {},
                              VULKAN_HPP_NAMESPACE::Extent2D                    maxImageExtent_      = {},
                              uint32_t                                          maxImageArrayLayers_ = {},
                              VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR    supportedTransforms_ = {},
                              VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ =
                                VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
                              VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
                              VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
      : minImageCount( minImageCount_ )
      , maxImageCount( maxImageCount_ )
      , currentExtent( currentExtent_ )
      , minImageExtent( minImageExtent_ )
      , maxImageExtent( maxImageExtent_ )
      , maxImageArrayLayers( maxImageArrayLayers_ )
      , supportedTransforms( supportedTransforms_ )
      , currentTransform( currentTransform_ )
      , supportedCompositeAlpha( supportedCompositeAlpha_ )
      , supportedUsageFlags( supportedUsageFlags_ )
    {}

    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilitiesKHR( *reinterpret_cast<SurfaceCapabilitiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesKHR &
                            operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR *>( this );
    }

    operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default;
#else
    bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) &&
             ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) &&
             ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) &&
             ( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) &&
             ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) &&
             ( supportedUsageFlags == rhs.supportedUsageFlags );
    }

    bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                          minImageCount       = {};
    uint32_t                                          maxImageCount       = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    currentExtent       = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    minImageExtent      = {};
    VULKAN_HPP_NAMESPACE::Extent2D                    maxImageExtent      = {};
    uint32_t                                          maxImageArrayLayers = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR    supportedTransforms = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform =
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags        supportedUsageFlags     = {};
  };
  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
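
  // Illustrative usage sketch, not part of the generated API: assuming a valid
  // vk::PhysicalDevice `physicalDevice` and a vk::SurfaceKHR `surface`, a typical swapchain
  // setup reads these capabilities via the enhanced-mode overload declared later in this
  // header and clamps the requested image count:
  //   vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
  //   uint32_t imageCount = caps.minImageCount + 1;
  //   if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )
  //   {
  //     imageCount = caps.maxImageCount;  // maxImageCount == 0 means "no upper limit"
  //   }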

  struct SurfaceCapabilities2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(
      VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT
      : surfaceCapabilities( surfaceCapabilities_ )
    {}

    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilities2KHR( *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilities2KHR &
                            operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>( &rhs );
      return *this;
    }

    operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilities2KHR *>( this );
    }

    operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilities2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilities2KHR const & ) const = default;
#else
    bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities );
    }

    bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType               = StructureType::eSurfaceCapabilities2KHR;
    void *                                       pNext               = {};
    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
  };
  static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
  {
    using Type = SurfaceCapabilities2KHR;
  };
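
  // Illustrative usage sketch, not part of the generated API: assuming the
  // VK_KHR_get_surface_capabilities2 extension is enabled, a valid vk::PhysicalDevice
  // `physicalDevice`, and a vk::SurfaceKHR `surface`, the extended query takes a
  // vk::PhysicalDeviceSurfaceInfo2KHR and might look like:
  //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
  //   vk::SurfaceCapabilities2KHR caps = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
  //   vk::Extent2D extent = caps.surfaceCapabilities.currentExtent;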

  struct SurfaceFormatKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format        format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
                        VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ =
                          VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
      : format( format_ )
      , colorSpace( colorSpace_ )
    {}

    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
      return *this;
    }

    operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFormatKHR *>( this );
    }

    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFormatKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFormatKHR const & ) const = default;
#else
    bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace );
    }

    bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Format        format     = VULKAN_HPP_NAMESPACE::Format::eUndefined;
    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
  };
  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
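
  // Illustrative usage sketch, not part of the generated API: assuming a valid
  // vk::PhysicalDevice `physicalDevice` and a vk::SurfaceKHR `surface`, a common pattern is
  // to list the supported formats via the enhanced-mode overload declared later in this
  // header and prefer an sRGB format:
  //   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
  //   vk::SurfaceFormatKHR chosen = formats.front();
  //   for ( auto const & f : formats )
  //   {
  //     if ( ( f.format == vk::Format::eB8G8R8A8Srgb ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
  //     {
  //       chosen = f;
  //     }
  //   }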

  struct SurfaceFormat2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT
      : surfaceFormat( surfaceFormat_ )
    {}

    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceFormat2KHR &
                            operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
      return *this;
    }

    operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFormat2KHR *>( this );
    }

    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFormat2KHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFormat2KHR const & ) const = default;
#else
    bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat );
    }

    bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType         = StructureType::eSurfaceFormat2KHR;
    void *                                 pNext         = {};
    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
  };
  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
  {
    using Type = SurfaceFormat2KHR;
  };

  struct PhysicalDeviceToolPropertiesEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT(
      std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & name_        = {},
      std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & version_     = {},
      VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT            purposes_    = {},
      std::array<char, VK_MAX_DESCRIPTION_SIZE> const &    description_ = {},
      std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layer_       = {} ) VULKAN_HPP_NOEXCEPT
      : name( name_ )
      , version( version_ )
      , purposes( purposes_ )
      , description( description_ )
      , layer( layer_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceToolPropertiesEXT( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceToolPropertiesEXT( *reinterpret_cast<PhysicalDeviceToolPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT &
                            operator=( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceToolPropertiesEXT & operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceToolPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceToolPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceToolPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceToolPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) &&
             ( purposes == rhs.purposes ) && ( description == rhs.description ) && ( layer == rhs.layer );
    }

    bool operator!=( PhysicalDeviceToolPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name        = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version     = {};
    VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT                              purposes    = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE>    description = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer       = {};
  };
  static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceToolPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceToolPropertiesEXT>
  {
    using Type = PhysicalDeviceToolPropertiesEXT;
  };
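
  // Illustrative usage sketch, not part of the generated API: assuming the
  // VK_EXT_tooling_info extension is available and a valid vk::PhysicalDevice
  // `physicalDevice`, the active tools could be listed with the enhanced-mode overload
  // declared later in this header:
  //   std::vector<vk::PhysicalDeviceToolPropertiesEXT> tools = physicalDevice.getToolPropertiesEXT();
  //   for ( auto const & tool : tools )
  //   {
  //     std::string name = tool.name;  // ArrayWrapper1D<char, N> converts to std::string
  //   }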

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoCapabilitiesKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilitiesFlagsKHR capabilityFlags_    = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {},
                                               VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_   = {},
                                               VULKAN_HPP_NAMESPACE::Extent2D   videoPictureExtentGranularity_     = {},
                                               VULKAN_HPP_NAMESPACE::Extent2D   minExtent_                         = {},
                                               VULKAN_HPP_NAMESPACE::Extent2D   maxExtent_                         = {},
                                               uint32_t                         maxReferencePicturesSlotsCount_    = {},
                                               uint32_t maxReferencePicturesActiveCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : capabilityFlags( capabilityFlags_ )
      , minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ )
      , minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ )
      , videoPictureExtentGranularity( videoPictureExtentGranularity_ )
      , minExtent( minExtent_ )
      , maxExtent( maxExtent_ )
      , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ )
      , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoCapabilitiesKHR( *reinterpret_cast<VideoCapabilitiesKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR &
                            operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoCapabilitiesKHR *>( this );
    }

    operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoCapabilitiesKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoCapabilitiesKHR const & ) const = default;
#  else
    bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilityFlags == rhs.capabilityFlags ) &&
             ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) &&
             ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) &&
             ( videoPictureExtentGranularity == rhs.videoPictureExtentGranularity ) && ( minExtent == rhs.minExtent ) &&
             ( maxExtent == rhs.maxExtent ) &&
             ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) &&
             ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount );
    }

    bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType             sType           = StructureType::eVideoCapabilitiesKHR;
    void *                                          pNext           = {};
    VULKAN_HPP_NAMESPACE::VideoCapabilitiesFlagsKHR capabilityFlags = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                minBitstreamBufferOffsetAlignment = {};
    VULKAN_HPP_NAMESPACE::DeviceSize                minBitstreamBufferSizeAlignment   = {};
    VULKAN_HPP_NAMESPACE::Extent2D                  videoPictureExtentGranularity     = {};
    VULKAN_HPP_NAMESPACE::Extent2D                  minExtent                         = {};
    VULKAN_HPP_NAMESPACE::Extent2D                  maxExtent                         = {};
    uint32_t                                        maxReferencePicturesSlotsCount    = {};
    uint32_t                                        maxReferencePicturesActiveCount   = {};
  };
  static_assert( sizeof( VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoCapabilitiesKHR>
  {
    using Type = VideoCapabilitiesKHR;
  };
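
  // Illustrative usage sketch, not part of the generated API: assuming the provisional
  // VK_KHR_video_queue extension is enabled, a valid vk::PhysicalDevice `physicalDevice`,
  // and an already filled vk::VideoProfileKHR `profile`, the capabilities might be queried
  // via the enhanced-mode overload declared later in this header:
  //   vk::VideoCapabilitiesKHR caps = physicalDevice.getVideoCapabilitiesKHR( profile );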
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoProfilesKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfilesKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoProfilesKHR( uint32_t                                      profileCount_ = {},
                        const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_    = {} ) VULKAN_HPP_NOEXCEPT
      : profileCount( profileCount_ )
      , pProfiles( pProfiles_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoProfilesKHR( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoProfilesKHR( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoProfilesKHR( *reinterpret_cast<VideoProfilesKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoProfilesKHR & operator=( VideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoProfilesKHR & operator=( VkVideoProfilesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoProfilesKHR const *>( &rhs );
      return *this;
    }

    VideoProfilesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoProfilesKHR & setProfileCount( uint32_t profileCount_ ) VULKAN_HPP_NOEXCEPT
    {
      profileCount = profileCount_;
      return *this;
    }

    VideoProfilesKHR & setPProfiles( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles_ ) VULKAN_HPP_NOEXCEPT
    {
      pProfiles = pProfiles_;
      return *this;
    }

    operator VkVideoProfilesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoProfilesKHR *>( this );
    }

    operator VkVideoProfilesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoProfilesKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoProfilesKHR const & ) const = default;
#  else
    bool operator==( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) &&
             ( pProfiles == rhs.pProfiles );
    }

    bool operator!=( VideoProfilesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType           sType        = StructureType::eVideoProfilesKHR;
    void *                                        pNext        = {};
    uint32_t                                      profileCount = {};
    const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pProfiles    = {};
  };
  static_assert( sizeof( VideoProfilesKHR ) == sizeof( VkVideoProfilesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoProfilesKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoProfilesKHR>
  {
    using Type = VideoProfilesKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct PhysicalDeviceVideoFormatInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR(
      VULKAN_HPP_NAMESPACE::ImageUsageFlags          imageUsage_     = {},
      const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles_ = {} ) VULKAN_HPP_NOEXCEPT
      : imageUsage( imageUsage_ )
      , pVideoProfiles( pVideoProfiles_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast<PhysicalDeviceVideoFormatInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR &
                            operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( this );
    }

    operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVideoFormatInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default;
#  else
    bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage ) &&
             ( pVideoProfiles == rhs.pVideoProfiles );
    }

    bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType          = StructureType::ePhysicalDeviceVideoFormatInfoKHR;
    const void *                                   pNext          = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags          imageUsage     = {};
    const VULKAN_HPP_NAMESPACE::VideoProfilesKHR * pVideoProfiles = {};
  };
  static_assert( sizeof( PhysicalDeviceVideoFormatInfoKHR ) == sizeof( VkPhysicalDeviceVideoFormatInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVideoFormatInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVideoFormatInfoKHR>
  {
    using Type = PhysicalDeviceVideoFormatInfoKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoFormatPropertiesKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR(
      VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
      : format( format_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoFormatPropertiesKHR( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoFormatPropertiesKHR( *reinterpret_cast<VideoFormatPropertiesKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoFormatPropertiesKHR &
                            operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkVideoFormatPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoFormatPropertiesKHR *>( this );
    }

    operator VkVideoFormatPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoFormatPropertiesKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoFormatPropertiesKHR const & ) const = default;
#  else
    bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format );
    }

    bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eVideoFormatPropertiesKHR;
    void *                              pNext  = {};
    VULKAN_HPP_NAMESPACE::Format        format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  };
  static_assert( sizeof( VideoFormatPropertiesKHR ) == sizeof( VkVideoFormatPropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoFormatPropertiesKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoFormatPropertiesKHR>
  {
    using Type = VideoFormatPropertiesKHR;
  };
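
  // Illustrative usage sketch, not part of the generated API: assuming the provisional video
  // extensions are enabled, a valid vk::PhysicalDevice `physicalDevice`, and an already filled
  // vk::VideoProfileKHR `profile`, the three structs above combine into a format query roughly
  // as follows (enhanced-mode overload declared later in this header):
  //   vk::VideoProfilesKHR profiles( 1, &profile );
  //   vk::PhysicalDeviceVideoFormatInfoKHR formatInfo( vk::ImageUsageFlagBits::eVideoDecodeDstKHR, &profiles );
  //   std::vector<vk::VideoFormatPropertiesKHR> formats =
  //     physicalDevice.getVideoFormatPropertiesKHR( formatInfo );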
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  class UniqueHandleTraits<Device, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<NoParent, Dispatch>;
  };
  using UniqueDevice = UniqueHandle<Device, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
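
  // Illustrative usage sketch, not part of the generated API: assuming smart handles are
  // enabled, a valid vk::PhysicalDevice `physicalDevice`, and an already filled
  // vk::DeviceCreateInfo `createInfo`, the createDeviceUnique overload declared later in
  // this header yields a vk::UniqueDevice that destroys the device automatically:
  //   vk::UniqueDevice device = physicalDevice.createDeviceUnique( createInfo );
  //   device->waitIdle();  // UniqueHandle exposes the wrapped handle via operator->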

  class PhysicalDevice
  {
  public:
    using CType = VkPhysicalDevice;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;

  public:
    VULKAN_HPP_CONSTEXPR         PhysicalDevice() = default;
    VULKAN_HPP_CONSTEXPR         PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
      : m_physicalDevice( physicalDevice )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    PhysicalDevice & operator=( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
    {
      m_physicalDevice = physicalDevice;
      return *this;
    }
#endif

    PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_physicalDevice = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevice const & ) const = default;
#else
    bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_physicalDevice == rhs.m_physicalDevice;
    }

    bool operator!=( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_physicalDevice != rhs.m_physicalDevice;
    }

    bool operator<( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_physicalDevice < rhs.m_physicalDevice;
    }
#endif

    //=== VK_VERSION_1_0 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
                         getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getFormatProperties( VULKAN_HPP_NAMESPACE::Format             format,
                              VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties
                         getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format                  format,
                                                   VULKAN_HPP_NAMESPACE::ImageType               type,
                                                   VULKAN_HPP_NAMESPACE::ImageTiling             tiling,
                                                   VULKAN_HPP_NAMESPACE::ImageUsageFlags         usage,
                                                   VULKAN_HPP_NAMESPACE::ImageCreateFlags        flags,
                                                   VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
      getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format           format,
                                VULKAN_HPP_NAMESPACE::ImageType        type,
                                VULKAN_HPP_NAMESPACE::ImageTiling      tiling,
                                VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage,
                                VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
                         getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      getQueueFamilyProperties( uint32_t *                                    pQueueFamilyPropertyCount,
                                VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>,
              typename Dispatch                       = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
                         getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>,
      typename Dispatch                       = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                              = QueueFamilyPropertiesAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
                         getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
                         getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo *    pCreateInfo,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                       VULKAN_HPP_NAMESPACE::Device *                    pDevice,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type
      createDevice( const DeviceCreateInfo &            createInfo,
                    Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
      createDeviceUnique( const DeviceCreateInfo &            createInfo,
                          Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties(
      const char *                                pLayerName,
      uint32_t *                                  pPropertyCount,
      VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>,
              typename Dispatch                     = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
      enumerateDeviceExtensionProperties( Optional<const std::string> layerName
                                                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>,
              typename Dispatch                     = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                            = ExtensionPropertiesAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
      enumerateDeviceExtensionProperties( Optional<const std::string>    layerName,
                                          ExtensionPropertiesAllocator & extensionPropertiesAllocator,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
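
    // Illustrative usage sketch (same assumptions as above): enumerate device extensions and check
    // for swapchain support.
    //
    //   std::vector<vk::ExtensionProperties> extensions = physicalDevice.enumerateDeviceExtensionProperties();
    //   bool hasSwapchain =
    //     std::any_of( extensions.begin(), extensions.end(), []( vk::ExtensionProperties const & ep )
    //                  { return std::strcmp( ep.extensionName.data(), VK_KHR_SWAPCHAIN_EXTENSION_NAME ) == 0; } );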

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties(
      uint32_t *                              pPropertyCount,
      VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>,
              typename Dispatch                 = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
      enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>,
              typename Dispatch                 = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                        = LayerPropertiesAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
      enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
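
    // Illustrative usage sketch (same assumptions): device layers are deprecated in favour of
    // instance layers, but the query is still available.
    //
    //   std::vector<vk::LayerProperties> layers = physicalDevice.enumerateDeviceLayerProperties();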

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format                        format,
                                         VULKAN_HPP_NAMESPACE::ImageType                     type,
                                         VULKAN_HPP_NAMESPACE::SampleCountFlagBits           samples,
                                         VULKAN_HPP_NAMESPACE::ImageUsageFlags               usage,
                                         VULKAN_HPP_NAMESPACE::ImageTiling                   tiling,
                                         uint32_t *                                          pPropertyCount,
                                         VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>,
              typename Dispatch                             = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
                         getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format              format,
                                                         VULKAN_HPP_NAMESPACE::ImageType           type,
                                                         VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
                                                         VULKAN_HPP_NAMESPACE::ImageUsageFlags     usage,
                                                         VULKAN_HPP_NAMESPACE::ImageTiling         tiling,
                                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>,
      typename Dispatch                             = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                    = SparseImageFormatPropertiesAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
                         getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format              format,
                                                         VULKAN_HPP_NAMESPACE::ImageType           type,
                                                         VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
                                                         VULKAN_HPP_NAMESPACE::ImageUsageFlags     usage,
                                                         VULKAN_HPP_NAMESPACE::ImageTiling         tiling,
                                                         SparseImageFormatPropertiesAllocator &    sparseImageFormatPropertiesAllocator,
                                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
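
    // Illustrative usage sketch (same assumptions; the format/usage combination is an arbitrary example):
    // an empty result vector indicates the combination is not supported for sparse images.
    //
    //   std::vector<vk::SparseImageFormatProperties> sparseProps =
    //     physicalDevice.getSparseImageFormatProperties( vk::Format::eR8G8B8A8Unorm,
    //                                                    vk::ImageType::e2D,
    //                                                    vk::SampleCountFlagBits::e1,
    //                                                    vk::ImageUsageFlagBits::eColorAttachment,
    //                                                    vk::ImageTiling::eOptimal );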

    //=== VK_VERSION_1_1 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
                         getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
                         getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
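
    // Illustrative usage sketch (same assumptions; the Vulkan 1.2 feature struct is just one example):
    // the templated overload fills a StructureChain, linking the pNext chain automatically.
    //
    //   auto chain    = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
    //   bool timeline = chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;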

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
                         getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
                         getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
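
    // Illustrative usage sketch (same assumptions; driver properties chosen as an example chained struct):
    //
    //   auto props = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceDriverProperties>();
    //   uint32_t apiVersion = props.get<vk::PhysicalDeviceProperties2>().properties.apiVersion;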

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format              format,
                               VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
                         getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
                         getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getImageFormatProperties2(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
      VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *               pImageFormatProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
      getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
      getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
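
    // Illustrative usage sketch (same assumptions): in enhanced mode an unsupported combination is
    // reported through the usual error handling (an exception, or a result value when exceptions are disabled).
    //
    //   vk::PhysicalDeviceImageFormatInfo2 formatInfo( vk::Format::eR8G8B8A8Unorm,
    //                                                  vk::ImageType::e2D,
    //                                                  vk::ImageTiling::eOptimal,
    //                                                  vk::ImageUsageFlagBits::eSampled );
    //   vk::ImageFormatProperties2 formatProps = physicalDevice.getImageFormatProperties2( formatInfo );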

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getQueueFamilyProperties2( uint32_t *                                     pQueueFamilyPropertyCount,
                                    VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
              typename Dispatch                        = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
                         getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
      typename Dispatch                        = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                               = QueueFamilyProperties2Allocator,
      typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
                         getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
                                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename StructureChain,
              typename StructureChainAllocator = std::allocator<StructureChain>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
                         getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename StructureChain,
              typename StructureChainAllocator = std::allocator<StructureChain>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                       = StructureChainAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
                         getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator,
                                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
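
    // Illustrative usage sketch (same assumptions): find a queue family with graphics support.
    //
    //   std::vector<vk::QueueFamilyProperties2> families = physicalDevice.getQueueFamilyProperties2();
    //   uint32_t graphicsFamily = 0;
    //   for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); ++i )
    //   {
    //     if ( families[i].queueFamilyProperties.queueFlags & vk::QueueFlagBits::eGraphics )
    //     {
    //       graphicsFamily = i;
    //       break;
    //     }
    //   }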

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
                         getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
                         getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
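
    // Illustrative usage sketch (same assumptions; the memory-budget struct requires the
    // VK_EXT_memory_budget extension and is chosen here purely to show chaining):
    //
    //   auto memChain  = physicalDevice.getMemoryProperties2<vk::PhysicalDeviceMemoryProperties2,
    //                                                        vk::PhysicalDeviceMemoryBudgetPropertiesEXT>();
    //   uint32_t heaps = memChain.get<vk::PhysicalDeviceMemoryProperties2>().memoryProperties.memoryHeapCount;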

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getSparseImageFormatProperties2(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
      uint32_t *                                                         pPropertyCount,
      VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *               pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
              typename Dispatch                              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
                         getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
      typename Dispatch                              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                     = SparseImageFormatProperties2Allocator,
      typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
                         getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                          SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
                                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getExternalBufferProperties(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
      VULKAN_HPP_NAMESPACE::ExternalBufferProperties *               pExternalBufferProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties(
      const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
                                     VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties(
      const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getExternalSemaphoreProperties(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *               pExternalSemaphoreProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties(
      const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
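
    // Illustrative usage sketch (same assumptions; the opaque-FD handle type is an example and is only
    // meaningful on platforms that support it):
    //
    //   vk::PhysicalDeviceExternalSemaphoreInfo semaphoreInfo( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
    //   vk::ExternalSemaphoreProperties         semaphoreProps = physicalDevice.getExternalSemaphoreProperties( semaphoreInfo );
    //   bool exportable =
    //     static_cast<bool>( semaphoreProps.externalSemaphoreFeatures & vk::ExternalSemaphoreFeatureFlagBits::eExportable );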

    //=== VK_KHR_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getSurfaceSupportKHR( uint32_t                         queueFamilyIndex,
                                               VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                               VULKAN_HPP_NAMESPACE::Bool32 *   pSupported,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
      getSurfaceSupportKHR( uint32_t                         queueFamilyIndex,
                            VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR(
      VULKAN_HPP_NAMESPACE::SurfaceKHR               surface,
      VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
      getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR         surface,
                                               uint32_t *                               pSurfaceFormatCount,
                                               VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>,
              typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
      getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>,
              typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                         = SurfaceFormatKHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
      getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                            SurfaceFormatKHRAllocator &      surfaceFormatKHRAllocator,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR(
      VULKAN_HPP_NAMESPACE::SurfaceKHR       surface,
      uint32_t *                             pPresentModeCount,
      VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
      getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                       = PresentModeKHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
      getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                 PresentModeKHRAllocator &        presentModeKHRAllocator,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
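
    // Illustrative usage sketch (same assumptions, plus an existing vk::SurfaceKHR named `surface`
    // created from the instance): these queries typically feed swapchain creation.
    //
    //   vk::SurfaceCapabilitiesKHR        capabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface );
    //   std::vector<vk::SurfaceFormatKHR> formats      = physicalDevice.getSurfaceFormatsKHR( surface );
    //   std::vector<vk::PresentModeKHR>   presentModes = physicalDevice.getSurfacePresentModesKHR( surface );
    //   vk::Bool32                        supported    = physicalDevice.getSurfaceSupportKHR( /*queueFamilyIndex=*/0, surface );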

    //=== VK_KHR_swapchain ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                  uint32_t *                       pRectCount,
                                                  VULKAN_HPP_NAMESPACE::Rect2D *   pRects,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Rect2DAllocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
      getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Rect2DAllocator = std::allocator<Rect2D>,
              typename Dispatch        = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B               = Rect2DAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
      getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                               Rect2DAllocator &                rect2DAllocator,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_display ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getDisplayPropertiesKHR( uint32_t *                                   pPropertyCount,
                                                  VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>,
              typename Dispatch                      = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
      getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>,
              typename Dispatch                      = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                             = DisplayPropertiesKHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
      getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR(
      uint32_t *                                        pPropertyCount,
      VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>,
              typename Dispatch                           = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
      getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>,
      typename Dispatch                           = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                  = DisplayPlanePropertiesKHRAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
      getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR(
      uint32_t                           planeIndex,
      uint32_t *                         pDisplayCount,
      VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>,
              typename Dispatch            = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
      getDisplayPlaneSupportedDisplaysKHR( uint32_t         planeIndex,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>,
              typename Dispatch            = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                   = DisplayKHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
      getDisplayPlaneSupportedDisplaysKHR( uint32_t              planeIndex,
                                           DisplayKHRAllocator & displayKHRAllocator,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR(
      VULKAN_HPP_NAMESPACE::DisplayKHR                 display,
      uint32_t *                                       pPropertyCount,
      VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>,
              typename Dispatch                          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
      getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>,
      typename Dispatch                          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                 = DisplayModePropertiesKHRAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
      getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                                   DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR                       display,
                                               const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                               VULKAN_HPP_NAMESPACE::DisplayModeKHR *                 pMode,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
      createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                            const DisplayModeCreateInfoKHR &    createInfo,
                            Optional<const AllocationCallbacks> allocator
                                                                VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
      createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                                  const DisplayModeCreateInfoKHR &    createInfo,
                                  Optional<const AllocationCallbacks> allocator
                                                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR(
      VULKAN_HPP_NAMESPACE::DisplayModeKHR                mode,
      uint32_t                                            planeIndex,
      VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
      getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
                                      uint32_t                             planeIndex,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
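
    // Illustrative usage sketch (same assumptions; only meaningful where VK_KHR_display is supported):
    // enumerate displays and the modes of the first one for direct-to-display presentation.
    //
    //   std::vector<vk::DisplayPropertiesKHR> displays = physicalDevice.getDisplayPropertiesKHR();
    //   if ( !displays.empty() )
    //   {
    //     std::vector<vk::DisplayModePropertiesKHR> modes =
    //       physicalDevice.getDisplayModePropertiesKHR( displays[0].display );
    //   }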

#if defined( VK_USE_PLATFORM_XLIB_KHR )
    //=== VK_KHR_xlib_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getXlibPresentationSupportKHR( uint32_t         queueFamilyIndex,
                                          Display *        dpy,
                                          VisualID         visualID,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getXlibPresentationSupportKHR( uint32_t         queueFamilyIndex,
                                          Display &        dpy,
                                          VisualID         visualID,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_XLIB_KHR*/

#if defined( VK_USE_PLATFORM_XCB_KHR )
    //=== VK_KHR_xcb_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getXcbPresentationSupportKHR( uint32_t           queueFamilyIndex,
                                         xcb_connection_t * connection,
                                         xcb_visualid_t     visual_id,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getXcbPresentationSupportKHR( uint32_t           queueFamilyIndex,
                                         xcb_connection_t & connection,
                                         xcb_visualid_t     visual_id,
                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_XCB_KHR*/

#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
    //=== VK_KHR_wayland_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getWaylandPresentationSupportKHR( uint32_t            queueFamilyIndex,
                                             struct wl_display * display,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getWaylandPresentationSupportKHR( uint32_t            queueFamilyIndex,
                                             struct wl_display & display,
                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WAYLAND_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_win32_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getWin32PresentationSupportKHR( uint32_t         queueFamilyIndex,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
    //=== VK_KHR_video_queue ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,
                                                  VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR *  pCapabilities,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
      getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
      getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
      uint32_t *                                                     pVideoFormatPropertyCount,
      VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR *               pVideoFormatProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename VideoFormatPropertiesKHRAllocator = std::allocator<VideoFormatPropertiesKHR>,
              typename Dispatch                          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
      getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename VideoFormatPropertiesKHRAllocator = std::allocator<VideoFormatPropertiesKHR>,
      typename Dispatch                          = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                 = VideoFormatPropertiesKHRAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
      getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
                                   VideoFormatPropertiesKHRAllocator &      videoFormatPropertiesKHRAllocator,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/

    //=== VK_NV_external_memory_capabilities ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV(
      VULKAN_HPP_NAMESPACE::Format                            format,
      VULKAN_HPP_NAMESPACE::ImageType                         type,
      VULKAN_HPP_NAMESPACE::ImageTiling                       tiling,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags                   usage,
      VULKAN_HPP_NAMESPACE::ImageCreateFlags                  flags,
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV   externalHandleType,
      VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
      getExternalImageFormatPropertiesNV(
        VULKAN_HPP_NAMESPACE::Format           format,
        VULKAN_HPP_NAMESPACE::ImageType        type,
        VULKAN_HPP_NAMESPACE::ImageTiling      tiling,
        VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage,
        VULKAN_HPP_NAMESPACE::ImageCreateFlags flags          VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_get_physical_device_properties2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
                         getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
                         getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
                         getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
                         getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format              format,
                               VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2
                         getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
                         getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
      VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *               pImageFormatProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
      getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
      getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getQueueFamilyProperties2KHR( uint32_t *                                     pQueueFamilyPropertyCount,
                                       VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
              typename Dispatch                        = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
                         getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>,
      typename Dispatch                        = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                               = QueueFamilyProperties2Allocator,
      typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
                         getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
                                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename StructureChain,
              typename StructureChainAllocator = std::allocator<StructureChain>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
                         getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename StructureChain,
              typename StructureChainAllocator = std::allocator<StructureChain>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                       = StructureChainAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator>
                         getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator,
                                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
                         getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...>
                         getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getSparseImageFormatProperties2KHR(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
      uint32_t *                                                         pPropertyCount,
      VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *               pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
              typename Dispatch                              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
                         getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>,
      typename Dispatch                              = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                     = SparseImageFormatProperties2Allocator,
      typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
                         getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                             SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
                                                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_external_memory_capabilities ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getExternalBufferPropertiesKHR(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
      VULKAN_HPP_NAMESPACE::ExternalBufferProperties *               pExternalBufferProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR(
      const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_external_semaphore_capabilities ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getExternalSemaphorePropertiesKHR(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *               pExternalSemaphoreProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR(
      const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_EXT_direct_mode_display ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<void>::type
         releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
    //=== VK_EXT_acquire_xlib_display ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         acquireXlibDisplayEXT( Display *                        dpy,
                                                VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
      acquireXlibDisplayEXT( Display &                        dpy,
                             VULKAN_HPP_NAMESPACE::DisplayKHR display,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getRandROutputDisplayEXT( Display *                          dpy,
                                                   RROutput                           rrOutput,
                                                   VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT(
      Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
      getRandROutputDisplayEXTUnique( Display &        dpy,
                                      RROutput         rrOutput,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/

    //=== VK_EXT_display_surface_counter ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT(
      VULKAN_HPP_NAMESPACE::SurfaceKHR                surface,
      VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
      getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
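
    // Usage sketch (illustrative, not generated; assumes a valid vk::SurfaceKHR `surface` and
    // exceptions enabled):
    //
    //   vk::SurfaceCapabilities2EXT caps = physicalDevice.getSurfaceCapabilities2EXT( surface );
    //   bool hasVBlankCounter = static_cast<bool>( caps.supportedSurfaceCounters &
    //                                              vk::SurfaceCounterFlagBitsEXT::eVblank );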

    //=== VK_KHR_external_fence_capabilities ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getExternalFencePropertiesKHR(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
      VULKAN_HPP_NAMESPACE::ExternalFenceProperties *               pExternalFenceProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR(
      const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_KHR_performance_query ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR(
      uint32_t                                                 queueFamilyIndex,
      uint32_t *                                               pCounterCount,
      VULKAN_HPP_NAMESPACE::PerformanceCounterKHR *            pCounters,
      VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>,
              typename Dispatch  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
      enumerateQueueFamilyPerformanceQueryCountersKHR(
        uint32_t                                                        queueFamilyIndex,
        ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename Allocator                 = std::allocator<PerformanceCounterDescriptionKHR>,
              typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                         = Allocator,
              typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value,
                                      int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
      enumerateQueueFamilyPerformanceQueryCountersKHR(
        uint32_t                                                        queueFamilyIndex,
        ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
        Allocator const &                                               vectorAllocator,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PerformanceCounterKHRAllocator            = std::allocator<PerformanceCounterKHR>,
              typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>,
              typename Dispatch                                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
      enumerateQueueFamilyPerformanceQueryCountersKHR(
        uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PerformanceCounterKHRAllocator            = std::allocator<PerformanceCounterKHR>,
              typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>,
              typename Dispatch                                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B1                                        = PerformanceCounterKHRAllocator,
              typename B2                                        = PerformanceCounterDescriptionKHRAllocator,
              typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
                                        std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
                                      int>::type                 = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
      enumerateQueueFamilyPerformanceQueryCountersKHR(
        uint32_t                                    queueFamilyIndex,
        PerformanceCounterKHRAllocator &            performanceCounterKHRAllocator,
        PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
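
    // Usage sketch (illustrative, not generated; assumes exceptions enabled and a hypothetical
    // `queueFamilyIndex`). The pair-returning overload fetches the counters and their descriptions
    // for one queue family in a single call:
    //
    //   auto [counters, descriptions] =
    //     physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
    //   for ( size_t i = 0; i < counters.size(); ++i )
    //   {
    //     // counters[i].unit / .scope / .storage, descriptions[i].name / .category / .description
    //   }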

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getQueueFamilyPerformanceQueryPassesKHR(
      const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
      uint32_t *                                                      pNumPasses,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR(
      const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
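
    // Usage sketch (illustrative, not generated; `queueFamilyIndex` and the counter indices are
    // assumptions): query how many submit passes a performance query pool with the chosen counters
    // will need.
    //
    //   std::array<uint32_t, 2> counterIndices = { 0, 1 };
    //   vk::QueryPoolPerformanceCreateInfoKHR createInfo(
    //     queueFamilyIndex, static_cast<uint32_t>( counterIndices.size() ), counterIndices.data() );
    //   uint32_t numPasses = physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( createInfo );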

    //=== VK_KHR_get_surface_capabilities2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
      VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR *             pSurfaceCapabilities,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
      getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type
      getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                uint32_t *                                                  pSurfaceFormatCount,
                                                VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR *                   pSurfaceFormats,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename SurfaceFormat2KHRAllocator = std::allocator<SurfaceFormat2KHR>,
              typename Dispatch                   = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
      getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename SurfaceFormat2KHRAllocator = std::allocator<SurfaceFormat2KHR>,
              typename Dispatch                   = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                          = SurfaceFormat2KHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
      getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                             SurfaceFormat2KHRAllocator &          surfaceFormat2KHRAllocator,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
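
    // Usage sketch (illustrative, not generated; assumes a valid vk::SurfaceKHR `surface`):
    //
    //   vk::PhysicalDeviceSurfaceInfo2KHR  surfaceInfo( surface );
    //   std::vector<vk::SurfaceFormat2KHR> formats = physicalDevice.getSurfaceFormats2KHR( surfaceInfo );
    //   for ( vk::SurfaceFormat2KHR const & f : formats )
    //   {
    //     // f.surfaceFormat.format / f.surfaceFormat.colorSpace
    //   }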

    //=== VK_KHR_get_display_properties2 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getDisplayProperties2KHR( uint32_t *                                    pPropertyCount,
                                                   VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>,
              typename Dispatch                       = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
      getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>,
      typename Dispatch                       = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                              = DisplayProperties2KHRAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
      getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
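
    // Usage sketch (illustrative, not generated): enumerate the displays attached to this physical
    // device via VK_KHR_get_display_properties2.
    //
    //   for ( vk::DisplayProperties2KHR const & dp : physicalDevice.getDisplayProperties2KHR() )
    //   {
    //     // dp.displayProperties.display, dp.displayProperties.displayName, ...
    //   }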

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR(
      uint32_t *                                         pPropertyCount,
      VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>,
              typename Dispatch                            = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
      getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>,
      typename Dispatch                            = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                   = DisplayPlaneProperties2KHRAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
      getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR(
      VULKAN_HPP_NAMESPACE::DisplayKHR                  display,
      uint32_t *                                        pPropertyCount,
      VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>,
              typename Dispatch                           = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
      getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>,
      typename Dispatch                           = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                  = DisplayModeProperties2KHRAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
      getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR     display,
                                    DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR(
      const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR *   pDisplayPlaneInfo,
      VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
      typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
      getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo,
                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_EXT_sample_locations ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits        samples,
                                      VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT(
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_EXT_calibrated_timestamps ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT(
      uint32_t *                            pTimeDomainCount,
      VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>,
              typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
      getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>,
              typename Dispatch               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                      = TimeDomainEXTAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
      getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
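
    // Usage sketch (illustrative, not generated): check for the device time domain before calling
    // Device::getCalibratedTimestampsEXT.
    //
    //   std::vector<vk::TimeDomainEXT> domains = physicalDevice.getCalibrateableTimeDomainsEXT();
    //   bool hasDeviceDomain =
    //     std::find( domains.begin(), domains.end(), vk::TimeDomainEXT::eDevice ) != domains.end();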

    //=== VK_KHR_fragment_shading_rate ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR(
      uint32_t *                                                   pFragmentShadingRateCount,
      VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <
      typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>,
      typename Dispatch                                      = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
      getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>,
      typename Dispatch                                      = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                             = PhysicalDeviceFragmentShadingRateKHRAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value,
                              int>::type                     = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
      getFragmentShadingRatesKHR(
        PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
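
    // Usage sketch (illustrative, not generated): list the fragment shading rates supported by the
    // device.
    //
    //   for ( vk::PhysicalDeviceFragmentShadingRateKHR const & rate :
    //         physicalDevice.getFragmentShadingRatesKHR() )
    //   {
    //     // rate.fragmentSize.width x rate.fragmentSize.height at rate.sampleCounts
    //   }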

    //=== VK_EXT_tooling_info ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getToolPropertiesEXT( uint32_t *                                              pToolCount,
                                               VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PhysicalDeviceToolPropertiesEXTAllocator = std::allocator<PhysicalDeviceToolPropertiesEXT>,
              typename Dispatch                                 = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
      getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PhysicalDeviceToolPropertiesEXTAllocator = std::allocator<PhysicalDeviceToolPropertiesEXT>,
              typename Dispatch                                 = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                                        = PhysicalDeviceToolPropertiesEXTAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value,
                                      int>::type                = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
      getToolPropertiesEXT( PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
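
    // Usage sketch (illustrative, not generated): report which tools and layers are currently
    // active on the device.
    //
    //   for ( vk::PhysicalDeviceToolPropertiesEXT const & tool : physicalDevice.getToolPropertiesEXT() )
    //   {
    //     // tool.name, tool.version, tool.purposes, tool.description
    //   }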

    //=== VK_NV_cooperative_matrix ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV(
      uint32_t *                                            pPropertyCount,
      VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>,
              typename Dispatch                               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
      getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>,
              typename Dispatch                               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                                      = CooperativeMatrixPropertiesNVAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value,
                                      int>::type              = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
      getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
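
    // Usage sketch (illustrative, not generated): inspect the cooperative matrix configurations
    // supported by the device.
    //
    //   for ( vk::CooperativeMatrixPropertiesNV const & p :
    //         physicalDevice.getCooperativeMatrixPropertiesNV() )
    //   {
    //     // p.MSize x p.NSize x p.KSize with p.AType / p.BType / p.CType / p.DType in p.scope
    //   }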

    //=== VK_NV_coverage_reduction_mode ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV(
      uint32_t *                                                   pCombinationCount,
      VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <
      typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>,
      typename Dispatch                                      = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
      getSupportedFramebufferMixedSamplesCombinationsNV(
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <
      typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>,
      typename Dispatch                                      = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
      typename B                                             = FramebufferMixedSamplesCombinationNVAllocator,
      typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value,
                              int>::type                     = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<
      std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
      getSupportedFramebufferMixedSamplesCombinationsNV(
        FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_EXT_full_screen_exclusive ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT(
      const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
      uint32_t *                                                  pPresentModeCount,
      VULKAN_HPP_NAMESPACE::PresentModeKHR *                      pPresentModes,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
      getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                       = PresentModeKHRAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
      getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                  PresentModeKHRAllocator &             presentModeKHRAllocator,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_NV_acquire_winrt_display ===

#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  else
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
         acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         getWinrtDisplayNV( uint32_t                           deviceRelativeId,
                                            VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
      getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
      getWinrtDisplayNVUnique( uint32_t         deviceRelativeId,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
    //=== VK_EXT_directfb_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getDirectFBPresentationSupportEXT( uint32_t         queueFamilyIndex,
                                              IDirectFB *      dfb,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getDirectFBPresentationSupportEXT( uint32_t         queueFamilyIndex,
                                              IDirectFB &      dfb,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_DIRECTFB_EXT*/

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
    //=== VK_QNX_screen_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getScreenPresentationSupportQNX( uint32_t                queueFamilyIndex,
                                            struct _screen_window * window,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    Bool32 getScreenPresentationSupportQNX( uint32_t                queueFamilyIndex,
                                            struct _screen_window & window,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_SCREEN_QNX*/

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
    {
      return m_physicalDevice;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_physicalDevice != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_physicalDevice == VK_NULL_HANDLE;
    }

  private:
    VkPhysicalDevice m_physicalDevice = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::ePhysicalDevice>
  {
    using type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice>
  {
    using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice>
  {
    using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PhysicalDevice>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  struct DeviceGroupDeviceCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(
      uint32_t                                     physicalDeviceCount_ = {},
      const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_    = {} ) VULKAN_HPP_NOEXCEPT
      : physicalDeviceCount( physicalDeviceCount_ )
      , pPhysicalDevices( pPhysicalDevices_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupDeviceCreateInfo( *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupDeviceCreateInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const &
        physicalDevices_ )
      : physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) )
      , pPhysicalDevices( physicalDevices_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo &
                            operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>( &rhs );
      return *this;
    }

    DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      physicalDeviceCount = physicalDeviceCount_;
      return *this;
    }

    DeviceGroupDeviceCreateInfo &
      setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
    {
      pPhysicalDevices = pPhysicalDevices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupDeviceCreateInfo & setPhysicalDevices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const &
        physicalDevices_ ) VULKAN_HPP_NOEXCEPT
    {
      physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
      pPhysicalDevices    = physicalDevices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo *>( this );
    }

    operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default;
#else
    bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
             ( pPhysicalDevices == rhs.pPhysicalDevices );
    }

    bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType               = StructureType::eDeviceGroupDeviceCreateInfo;
    const void *                                 pNext               = {};
    uint32_t                                     physicalDeviceCount = {};
    const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices    = {};
  };
  static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceGroupDeviceCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
  {
    using Type = DeviceGroupDeviceCreateInfo;
  };
  using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
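
  // Usage sketch (illustrative, not generated): chain this structure into DeviceCreateInfo::pNext
  // to create a logical device spanning a whole device group. `physicalDevices` is assumed to be a
  // std::vector<vk::PhysicalDevice> holding the handles of one group, e.g. copied out of
  // Instance::enumeratePhysicalDeviceGroups().
  //
  //   vk::DeviceGroupDeviceCreateInfo groupInfo( physicalDevices );
  //   vk::DeviceCreateInfo            deviceCreateInfo;
  //   deviceCreateInfo.setPNext( &groupInfo );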

  struct DeviceGroupPresentInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DeviceGroupPresentInfoKHR( uint32_t                                                swapchainCount_ = {},
                                 const uint32_t *                                        pDeviceMasks_   = {},
                                 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ =
                                   VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT
      : swapchainCount( swapchainCount_ )
      , pDeviceMasks( pDeviceMasks_ )
      , mode( mode_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupPresentInfoKHR( *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_,
                               VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR               mode_ =
                                 VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal )
      : swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) )
      , pDeviceMasks( deviceMasks_.data() )
      , mode( mode_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR &
                            operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>( &rhs );
      return *this;
    }

    DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceMasks = pDeviceMasks_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupPresentInfoKHR & setDeviceMasks(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
      pDeviceMasks   = deviceMasks_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceGroupPresentInfoKHR &
      setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
    {
      mode = mode_;
      return *this;
    }

    operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR *>( this );
    }

    operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupPresentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default;
#else
    bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
             ( pDeviceMasks == rhs.pDeviceMasks ) && ( mode == rhs.mode );
    }

    bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                     sType          = StructureType::eDeviceGroupPresentInfoKHR;
    const void *                                            pNext          = {};
    uint32_t                                                swapchainCount = {};
    const uint32_t *                                        pDeviceMasks   = {};
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode =
      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
  };
  static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceGroupPresentInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
  {
    using Type = DeviceGroupPresentInfoKHR;
  };
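
  // Usage sketch (illustrative, not generated; `presentInfo` is an assumed vk::PresentInfoKHR with
  // one swapchain): chain into PresentInfoKHR::pNext to select which device in the group presents.
  //
  //   uint32_t                      deviceMask = 0x1;  // one mask per swapchain in presentInfo
  //   vk::DeviceGroupPresentInfoKHR groupPresentInfo(
  //     deviceMask, vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
  //   presentInfo.setPNext( &groupPresentInfo );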

  struct DeviceGroupRenderPassBeginInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_                                     = {},
                                                         uint32_t deviceRenderAreaCount_                          = {},
                                                         const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {} )
      VULKAN_HPP_NOEXCEPT
      : deviceMask( deviceMask_ )
      , deviceRenderAreaCount( deviceRenderAreaCount_ )
      , pDeviceRenderAreas( pDeviceRenderAreas_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupRenderPassBeginInfo( *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupRenderPassBeginInfo(
      uint32_t                                                                                  deviceMask_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ )
      : deviceMask( deviceMask_ )
      , deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) )
      , pDeviceRenderAreas( deviceRenderAreas_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo &
                            operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>( &rhs );
      return *this;
    }

    DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }

    DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceRenderAreaCount = deviceRenderAreaCount_;
      return *this;
    }

    DeviceGroupRenderPassBeginInfo &
      setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
    {
      pDeviceRenderAreas = pDeviceRenderAreas_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ )
      VULKAN_HPP_NOEXCEPT
    {
      deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
      pDeviceRenderAreas    = deviceRenderAreas_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo *>( this );
    }

    operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default;
#else
    bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) &&
             ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
    }

    bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType  sType                 = StructureType::eDeviceGroupRenderPassBeginInfo;
    const void *                         pNext                 = {};
    uint32_t                             deviceMask            = {};
    uint32_t                             deviceRenderAreaCount = {};
    const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas    = {};
  };
  static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceGroupRenderPassBeginInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
  {
    using Type = DeviceGroupRenderPassBeginInfo;
  };
  using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
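
  // Usage sketch (illustrative, not generated; `renderPassBeginInfo`, `width` and `height` are
  // assumptions): chain into RenderPassBeginInfo::pNext to restrict a render pass to a subset of
  // devices in the group and give each its own render area.
  //
  //   vk::Rect2D                         renderArea( { 0, 0 }, { width, height } );
  //   vk::DeviceGroupRenderPassBeginInfo groupBeginInfo( 0x1 /*deviceMask*/, renderArea );
  //   renderPassBeginInfo.setPNext( &groupBeginInfo );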

  struct DeviceGroupSubmitInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DeviceGroupSubmitInfo( uint32_t         waitSemaphoreCount_            = {},
                             const uint32_t * pWaitSemaphoreDeviceIndices_   = {},
                             uint32_t         commandBufferCount_            = {},
                             const uint32_t * pCommandBufferDeviceMasks_     = {},
                             uint32_t         signalSemaphoreCount_          = {},
                             const uint32_t * pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
      : waitSemaphoreCount( waitSemaphoreCount_ )
      , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
      , commandBufferCount( commandBufferCount_ )
      , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
      , signalSemaphoreCount( signalSemaphoreCount_ )
      , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupSubmitInfo( *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupSubmitInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_     = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {} )
      : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) )
      , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() )
      , commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) )
      , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() )
      , signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) )
      , pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo &
                            operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>( &rhs );
      return *this;
    }

    DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount = waitSemaphoreCount_;
      return *this;
    }

    DeviceGroupSubmitInfo &
      setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ )
      VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreCount          = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
      pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount = commandBufferCount_;
      return *this;
    }

    DeviceGroupSubmitInfo &
      setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupSubmitInfo & setCommandBufferDeviceMasks(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ )
      VULKAN_HPP_NOEXCEPT
    {
      commandBufferCount        = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
      pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount = signalSemaphoreCount_;
      return *this;
    }

    DeviceGroupSubmitInfo &
      setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ )
      VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreCount          = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
      pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupSubmitInfo *>( this );
    }

    operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupSubmitInfo const & ) const = default;
#else
    bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) &&
             ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) &&
             ( commandBufferCount == rhs.commandBufferCount ) &&
             ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) &&
             ( signalSemaphoreCount == rhs.signalSemaphoreCount ) &&
             ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
    }

    bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                         = StructureType::eDeviceGroupSubmitInfo;
    const void *                        pNext                         = {};
    uint32_t                            waitSemaphoreCount            = {};
    const uint32_t *                    pWaitSemaphoreDeviceIndices   = {};
    uint32_t                            commandBufferCount            = {};
    const uint32_t *                    pCommandBufferDeviceMasks     = {};
    uint32_t                            signalSemaphoreCount          = {};
    const uint32_t *                    pSignalSemaphoreDeviceIndices = {};
  };
  static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
  {
    using Type = DeviceGroupSubmitInfo;
  };
  using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
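
  // Usage sketch (editorial illustration, not generated from the registry): wiring a
  // DeviceGroupSubmitInfo to caller-provided device-index / device-mask arrays with the
  // fluent setters. The helper name and parameters are hypothetical.
  inline DeviceGroupSubmitInfo exampleDeviceGroupSubmitInfo( uint32_t         semaphoreCount,
                                                             const uint32_t * pSemaphoreDeviceIndices,
                                                             uint32_t         commandBufferCount,
                                                             const uint32_t * pCommandBufferDeviceMasks ) VULKAN_HPP_NOEXCEPT
  {
    return DeviceGroupSubmitInfo()
      .setWaitSemaphoreCount( semaphoreCount )
      .setPWaitSemaphoreDeviceIndices( pSemaphoreDeviceIndices )
      .setCommandBufferCount( commandBufferCount )
      .setPCommandBufferDeviceMasks( pCommandBufferDeviceMasks )
      .setSignalSemaphoreCount( semaphoreCount )
      .setPSignalSemaphoreDeviceIndices( pSemaphoreDeviceIndices );
  }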

  struct DeviceGroupSwapchainCreateInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDeviceGroupSwapchainCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(
      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT : modes( modes_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR &
                            operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
      return *this;
    }

    DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceGroupSwapchainCreateInfoKHR &
      setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
    {
      modes = modes_;
      return *this;
    }

    operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR *>( this );
    }

    operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default;
#else
    bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes );
    }

    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
    const void *                                         pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
  };
  static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
  {
    using Type = DeviceGroupSwapchainCreateInfoKHR;
  };
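
  // Usage sketch (editorial illustration): requesting local-only presentation for a
  // device-group swapchain. The helper name is hypothetical; eLocal is one of the
  // DeviceGroupPresentModeFlagBitsKHR values and stands in for whatever modes the
  // application actually needs.
  inline DeviceGroupSwapchainCreateInfoKHR exampleDeviceGroupSwapchainCreateInfo() VULKAN_HPP_NOEXCEPT
  {
    return DeviceGroupSwapchainCreateInfoKHR().setModes( DeviceGroupPresentModeFlagBitsKHR::eLocal );
  }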

  struct DeviceMemoryOverallocationCreateInfoAMD
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDeviceMemoryOverallocationCreateInfoAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD(
      VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ =
        VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT
      : overallocationBehavior( overallocationBehavior_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceMemoryOverallocationCreateInfoAMD(
          *reinterpret_cast<DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD &
                            operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceMemoryOverallocationCreateInfoAMD &
      operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs );
      return *this;
    }

    DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior(
      VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
    {
      overallocationBehavior = overallocationBehavior_;
      return *this;
    }

    operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD *>( this );
    }

    operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default;
#else
    bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( overallocationBehavior == rhs.overallocationBehavior );
    }

    bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior =
      VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
  };
  static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) ==
                   sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceMemoryOverallocationCreateInfoAMD>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceMemoryOverallocationCreateInfoAMD>
  {
    using Type = DeviceMemoryOverallocationCreateInfoAMD;
  };
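
  // Usage sketch (editorial illustration): explicitly disallowing device memory
  // overallocation. The helper name is hypothetical; eDisallowed is one of the defined
  // MemoryOverallocationBehaviorAMD values.
  inline DeviceMemoryOverallocationCreateInfoAMD exampleDisallowOverallocation() VULKAN_HPP_NOEXCEPT
  {
    return DeviceMemoryOverallocationCreateInfoAMD().setOverallocationBehavior(
      MemoryOverallocationBehaviorAMD::eDisallowed );
  }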

  struct DeviceMemoryReportCallbackDataEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDeviceMemoryReportCallbackDataEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT(
      VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT     flags_ = {},
      VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ =
        VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate,
      uint64_t                         memoryObjectId_ = {},
      VULKAN_HPP_NAMESPACE::DeviceSize size_           = {},
      VULKAN_HPP_NAMESPACE::ObjectType objectType_     = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
      uint64_t                         objectHandle_   = {},
      uint32_t                         heapIndex_      = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , type( type_ )
      , memoryObjectId( memoryObjectId_ )
      , size( size_ )
      , objectType( objectType_ )
      , objectHandle( objectHandle_ )
      , heapIndex( heapIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast<DeviceMemoryReportCallbackDataEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceMemoryReportCallbackDataEXT &
                            operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const *>( &rhs );
      return *this;
    }

    operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT *>( this );
    }

    operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default;
#else
    bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) &&
             ( memoryObjectId == rhs.memoryObjectId ) && ( size == rhs.size ) && ( objectType == rhs.objectType ) &&
             ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex );
    }

    bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                  sType = StructureType::eDeviceMemoryReportCallbackDataEXT;
    const void *                                         pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT     flags = {};
    VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type =
      VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate;
    uint64_t                         memoryObjectId = {};
    VULKAN_HPP_NAMESPACE::DeviceSize size           = {};
    VULKAN_HPP_NAMESPACE::ObjectType objectType     = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
    uint64_t                         objectHandle   = {};
    uint32_t                         heapIndex      = {};
  };
  static_assert( sizeof( DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceMemoryReportCallbackDataEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceMemoryReportCallbackDataEXT>
  {
    using Type = DeviceMemoryReportCallbackDataEXT;
  };

  struct DevicePrivateDataCreateInfoEXT
  {
    static const bool                    allowDuplicate              = true;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DevicePrivateDataCreateInfoEXT( uint32_t privateDataSlotRequestCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : privateDataSlotRequestCount( privateDataSlotRequestCount_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DevicePrivateDataCreateInfoEXT( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DevicePrivateDataCreateInfoEXT( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DevicePrivateDataCreateInfoEXT( *reinterpret_cast<DevicePrivateDataCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfoEXT &
                            operator=( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DevicePrivateDataCreateInfoEXT & operator=( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfoEXT const *>( &rhs );
      return *this;
    }

    DevicePrivateDataCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DevicePrivateDataCreateInfoEXT &
      setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
    {
      privateDataSlotRequestCount = privateDataSlotRequestCount_;
      return *this;
    }

    operator VkDevicePrivateDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDevicePrivateDataCreateInfoEXT *>( this );
    }

    operator VkDevicePrivateDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDevicePrivateDataCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DevicePrivateDataCreateInfoEXT const & ) const = default;
#else
    bool operator==( DevicePrivateDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
    }

    bool operator!=( DevicePrivateDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                       = StructureType::eDevicePrivateDataCreateInfoEXT;
    const void *                        pNext                       = {};
    uint32_t                            privateDataSlotRequestCount = {};
  };
  static_assert( sizeof( DevicePrivateDataCreateInfoEXT ) == sizeof( VkDevicePrivateDataCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DevicePrivateDataCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfoEXT>
  {
    using Type = DevicePrivateDataCreateInfoEXT;
  };
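
  // Usage sketch (editorial illustration): reserving a number of private data slots up
  // front; the caller would then chain the structure into DeviceCreateInfo::pNext. The
  // helper name and parameter are hypothetical.
  inline DevicePrivateDataCreateInfoEXT examplePrivateDataCreateInfo( uint32_t slotCount ) VULKAN_HPP_NOEXCEPT
  {
    return DevicePrivateDataCreateInfoEXT().setPrivateDataSlotRequestCount( slotCount );
  }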

  struct DeviceQueueGlobalPriorityCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ =
                                                VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT
      : globalPriority( globalPriority_ )
    {}

    VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DeviceQueueGlobalPriorityCreateInfoEXT(
          *reinterpret_cast<DeviceQueueGlobalPriorityCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoEXT &
                            operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DeviceQueueGlobalPriorityCreateInfoEXT &
      operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const *>( &rhs );
      return *this;
    }

    DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DeviceQueueGlobalPriorityCreateInfoEXT &
      setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
    {
      globalPriority = globalPriority_;
      return *this;
    }

    operator VkDeviceQueueGlobalPriorityCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT *>( this );
    }

    operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const & ) const = default;
#else
    bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority );
    }

    bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
    const void *                                 pNext = {};
    VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow;
  };
  static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DeviceQueueGlobalPriorityCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT>
  {
    using Type = DeviceQueueGlobalPriorityCreateInfoEXT;
  };
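
  // Usage sketch (editorial illustration): asking for a high global queue priority; the
  // caller would chain the structure into DeviceQueueCreateInfo::pNext. The helper name
  // is hypothetical; eHigh is one of the defined QueueGlobalPriorityEXT values.
  inline DeviceQueueGlobalPriorityCreateInfoEXT exampleHighPriorityQueueCreateInfo() VULKAN_HPP_NOEXCEPT
  {
    return DeviceQueueGlobalPriorityCreateInfoEXT().setGlobalPriority( QueueGlobalPriorityEXT::eHigh );
  }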

#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
  struct DirectFBSurfaceCreateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {},
                                                       IDirectFB *                                         dfb_   = {},
                                                       IDirectFBSurface * surface_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , dfb( dfb_ )
      , surface( surface_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast<DirectFBSurfaceCreateInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT &
                            operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const *>( &rhs );
      return *this;
    }

    DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DirectFBSurfaceCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT
    {
      dfb = dfb_;
      return *this;
    }

    DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }

    operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( this );
    }

    operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default;
#  else
    bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) &&
             ( surface == rhs.surface );
    }

    bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType   = StructureType::eDirectfbSurfaceCreateInfoEXT;
    const void *                                        pNext   = {};
    VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags   = {};
    IDirectFB *                                         dfb     = {};
    IDirectFBSurface *                                  surface = {};
  };
  static_assert( sizeof( DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DirectFBSurfaceCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
  {
    using Type = DirectFBSurfaceCreateInfoEXT;
  };
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/

  struct DispatchIndirectCommand
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
      , z( z_ )
    {}

    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
      : DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand &
                            operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
      return *this;
    }

    DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
    {
      z = z_;
      return *this;
    }

    operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDispatchIndirectCommand *>( this );
    }

    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDispatchIndirectCommand *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DispatchIndirectCommand const & ) const = default;
#else
    bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z );
    }

    bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t x = {};
    uint32_t y = {};
    uint32_t z = {};
  };
  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
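
  // Usage sketch (editorial illustration): filling a DispatchIndirectCommand that an
  // application would write into an indirect buffer for vkCmdDispatchIndirect. The
  // helper name is hypothetical and the group counts are placeholder values.
  inline DispatchIndirectCommand exampleDispatchIndirectCommand() VULKAN_HPP_NOEXCEPT
  {
    return DispatchIndirectCommand()
      .setX( 8 )   // workgroups along X
      .setY( 8 )   // workgroups along Y
      .setZ( 1 );  // workgroups along Z
  }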

  struct DisplayNativeHdrSurfaceCapabilitiesAMD
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(
      VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT
      : localDimmingSupport( localDimmingSupport_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayNativeHdrSurfaceCapabilitiesAMD(
          *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayNativeHdrSurfaceCapabilitiesAMD &
                            operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayNativeHdrSurfaceCapabilitiesAMD &
      operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
      return *this;
    }

    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
    }

    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default;
#else
    bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport );
    }

    bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
    void *                              pNext               = {};
    VULKAN_HPP_NAMESPACE::Bool32        localDimmingSupport = {};
  };
  static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
  {
    using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
  };

  struct DisplayPresentInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_    = {},
                                                VULKAN_HPP_NAMESPACE::Rect2D dstRect_    = {},
                                                VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT
      : srcRect( srcRect_ )
      , dstRect( dstRect_ )
      , persistent( persistent_ )
    {}

    VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplayPresentInfoKHR( *reinterpret_cast<DisplayPresentInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR &
                            operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>( &rhs );
      return *this;
    }

    DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
    {
      srcRect = srcRect_;
      return *this;
    }

    DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
    {
      dstRect = dstRect_;
      return *this;
    }

    DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
    {
      persistent = persistent_;
      return *this;
    }

    operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplayPresentInfoKHR *>( this );
    }

    operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplayPresentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplayPresentInfoKHR const & ) const = default;
#else
    bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) &&
             ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent );
    }

    bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::eDisplayPresentInfoKHR;
    const void *                        pNext      = {};
    VULKAN_HPP_NAMESPACE::Rect2D        srcRect    = {};
    VULKAN_HPP_NAMESPACE::Rect2D        dstRect    = {};
    VULKAN_HPP_NAMESPACE::Bool32        persistent = {};
  };
  static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
  {
    using Type = DisplayPresentInfoKHR;
  };
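
  // Usage sketch (editorial illustration): describing a non-persistent display present
  // that maps srcRect onto dstRect; the caller would chain the structure into
  // PresentInfoKHR::pNext. The helper name and parameters are hypothetical.
  inline DisplayPresentInfoKHR exampleDisplayPresentInfo( Rect2D const & srcRect,
                                                          Rect2D const & dstRect ) VULKAN_HPP_NOEXCEPT
  {
    return DisplayPresentInfoKHR().setSrcRect( srcRect ).setDstRect( dstRect ).setPersistent( VK_FALSE );
  }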

  struct DisplaySurfaceCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_           = {},
                                   VULKAN_HPP_NAMESPACE::DisplayModeKHR               displayMode_     = {},
                                   uint32_t                                           planeIndex_      = {},
                                   uint32_t                                           planeStackIndex_ = {},
                                   VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR  transform_ =
                                     VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
                                   float                                              globalAlpha_ = {},
                                   VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ =
                                     VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque,
                                   VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , displayMode( displayMode_ )
      , planeIndex( planeIndex_ )
      , planeStackIndex( planeStackIndex_ )
      , transform( transform_ )
      , globalAlpha( globalAlpha_ )
      , alphaMode( alphaMode_ )
      , imageExtent( imageExtent_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : DisplaySurfaceCreateInfoKHR( *reinterpret_cast<DisplaySurfaceCreateInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR &
                            operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

    DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    DisplaySurfaceCreateInfoKHR &
      setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    DisplaySurfaceCreateInfoKHR &
      setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
    {
      displayMode = displayMode_;
      return *this;
    }

    DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      planeIndex = planeIndex_;
      return *this;
    }

    DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      planeStackIndex = planeStackIndex_;
      return *this;
    }

    DisplaySurfaceCreateInfoKHR &
      setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
    {
      globalAlpha = globalAlpha_;
      return *this;
    }

    DisplaySurfaceCreateInfoKHR &
      setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
    {
      alphaMode = alphaMode_;
      return *this;
    }

    DisplaySurfaceCreateInfoKHR &
      setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
    {
      imageExtent = imageExtent_;
      return *this;
    }

    operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( this );
    }

    operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default;
#else
    bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( displayMode == rhs.displayMode ) && ( planeIndex == rhs.planeIndex ) &&
             ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) &&
             ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent );
    }

    bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType           = StructureType::eDisplaySurfaceCreateInfoKHR;
    const void *                                       pNext           = {};
    VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags           = {};
    VULKAN_HPP_NAMESPACE::DisplayModeKHR               displayMode     = {};
    uint32_t                                           planeIndex      = {};
    uint32_t                                           planeStackIndex = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR  transform =
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
    float                                              globalAlpha = {};
    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode =
      VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
    VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
  };
  static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DisplaySurfaceCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
  {
    using Type = DisplaySurfaceCreateInfoKHR;
  };
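
  // Usage sketch (editorial illustration): the common opaque, identity-transform case for
  // a display surface. The helper name and parameters are hypothetical; the display mode,
  // plane indices and extent would come from the application's display enumeration.
  inline DisplaySurfaceCreateInfoKHR exampleDisplaySurfaceCreateInfo( DisplayModeKHR   displayMode,
                                                                      uint32_t         planeIndex,
                                                                      uint32_t         planeStackIndex,
                                                                      Extent2D const & imageExtent ) VULKAN_HPP_NOEXCEPT
  {
    return DisplaySurfaceCreateInfoKHR()
      .setDisplayMode( displayMode )
      .setPlaneIndex( planeIndex )
      .setPlaneStackIndex( planeStackIndex )
      .setTransform( SurfaceTransformFlagBitsKHR::eIdentity )
      .setGlobalAlpha( 1.0f )
      .setAlphaMode( DisplayPlaneAlphaFlagBitsKHR::eOpaque )
      .setImageExtent( imageExtent );
  }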

  struct DrawIndexedIndirectCommand
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_    = {},
                                                     uint32_t instanceCount_ = {},
                                                     uint32_t firstIndex_    = {},
                                                     int32_t  vertexOffset_  = {},
                                                     uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
      : indexCount( indexCount_ )
      , instanceCount( instanceCount_ )
      , firstIndex( firstIndex_ )
      , vertexOffset( vertexOffset_ )
      , firstInstance( firstInstance_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrawIndexedIndirectCommand( *reinterpret_cast<DrawIndexedIndirectCommand const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand &
                            operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>( &rhs );
      return *this;
    }

    DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      indexCount = indexCount_;
      return *this;
    }

    DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCount = instanceCount_;
      return *this;
    }

    DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      firstIndex = firstIndex_;
      return *this;
    }

    DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexOffset = vertexOffset_;
      return *this;
    }

    DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
    {
      firstInstance = firstInstance_;
      return *this;
    }

    operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrawIndexedIndirectCommand *>( this );
    }

    operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrawIndexedIndirectCommand *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DrawIndexedIndirectCommand const & ) const = default;
#else
    bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) &&
             ( firstIndex == rhs.firstIndex ) && ( vertexOffset == rhs.vertexOffset ) &&
             ( firstInstance == rhs.firstInstance );
    }

    bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t indexCount    = {};
    uint32_t instanceCount = {};
    uint32_t firstIndex    = {};
    int32_t  vertexOffset  = {};
    uint32_t firstInstance = {};
  };
  static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value,
                 "struct wrapper is not a standard layout!" );

  struct DrawIndirectCommand
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_   = {},
                                              uint32_t instanceCount_ = {},
                                              uint32_t firstVertex_   = {},
                                              uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT
      : vertexCount( vertexCount_ )
      , instanceCount( instanceCount_ )
      , firstVertex( firstVertex_ )
      , firstInstance( firstInstance_ )
    {}

    VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrawIndirectCommand( *reinterpret_cast<DrawIndirectCommand const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand &
                            operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>( &rhs );
      return *this;
    }

    DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexCount = vertexCount_;
      return *this;
    }

    DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      instanceCount = instanceCount_;
      return *this;
    }

    DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
    {
      firstVertex = firstVertex_;
      return *this;
    }

    DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
    {
      firstInstance = firstInstance_;
      return *this;
    }

    operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrawIndirectCommand *>( this );
    }

    operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrawIndirectCommand *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DrawIndirectCommand const & ) const = default;
#else
    bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) &&
             ( firstVertex == rhs.firstVertex ) && ( firstInstance == rhs.firstInstance );
    }

    bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t vertexCount   = {};
    uint32_t instanceCount = {};
    uint32_t firstVertex   = {};
    uint32_t firstInstance = {};
  };
  static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );

  struct DrawMeshTasksIndirectCommandNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {},
                                                         uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT
      : taskCount( taskCount_ )
      , firstTask( firstTask_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrawMeshTasksIndirectCommandNV( *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV &
                            operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>( &rhs );
      return *this;
    }

    DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
    {
      taskCount = taskCount_;
      return *this;
    }

    DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
    {
      firstTask = firstTask_;
      return *this;
    }

    operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV *>( this );
    }

    operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default;
#else
    bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask );
    }

    bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t taskCount = {};
    uint32_t firstTask = {};
  };
  static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value,
                 "struct wrapper is not a standard layout!" );

  struct DrmFormatModifierPropertiesEXT
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(
      uint64_t                                 drmFormatModifier_               = {},
      uint32_t                                 drmFormatModifierPlaneCount_     = {},
      VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
      : drmFormatModifier( drmFormatModifier_ )
      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
      , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
    {}

    VULKAN_HPP_CONSTEXPR
      DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DrmFormatModifierPropertiesEXT &
                            operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( drmFormatModifier == rhs.drmFormatModifier ) &&
             ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
             ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
    }

    bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint64_t                                 drmFormatModifier               = {};
    uint32_t                                 drmFormatModifierPlaneCount     = {};
    VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
  };
  static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  struct DrmFormatModifierPropertiesListEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eDrmFormatModifierPropertiesListEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(
      uint32_t                                               drmFormatModifierCount_       = {},
      VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : drmFormatModifierCount( drmFormatModifierCount_ )
      , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    DrmFormatModifierPropertiesListEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const &
        drmFormatModifierProperties_ )
      : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) )
      , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 DrmFormatModifierPropertiesListEXT &
                            operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    DrmFormatModifierPropertiesListEXT &
      operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
      return *this;
    }

    operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT *>( this );
    }

    operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default;
#else
    bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
             ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
    }

    bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::eDrmFormatModifierPropertiesListEXT;
    void *                                                 pNext = {};
    uint32_t                                               drmFormatModifierCount       = {};
    VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {};
  };
  static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
  {
    using Type = DrmFormatModifierPropertiesListEXT;
  };
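
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  // Usage sketch (editorial illustration): pointing a DrmFormatModifierPropertiesListEXT
  // at caller-owned storage so that a properties query can write one entry per DRM format
  // modifier into it. The helper name is hypothetical; in enhanced mode the
  // ArrayProxyNoTemporaries constructor above achieves the same wiring in one step.
  inline DrmFormatModifierPropertiesListEXT exampleDrmFormatModifierPropertiesList(
    std::vector<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> & storage ) VULKAN_HPP_NOEXCEPT
  {
    DrmFormatModifierPropertiesListEXT list;
    list.drmFormatModifierCount       = static_cast<uint32_t>( storage.size() );
    list.pDrmFormatModifierProperties = storage.data();
    return list;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/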

  struct ExportFenceCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleTypes( handleTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo &
                            operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs );
      return *this;
    }

    ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExportFenceCreateInfo &
      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }

    operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportFenceCreateInfo *>( this );
    }

    operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportFenceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportFenceCreateInfo const & ) const = default;
#else
    bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
    }

    bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType       = StructureType::eExportFenceCreateInfo;
    const void *                                       pNext       = {};
    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
  };
  static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
  {
    using Type = ExportFenceCreateInfo;
  };
  using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
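  // Illustrative sketch (not part of the generated header): ExportFenceCreateInfo is chained into
  // FenceCreateInfo::pNext to request an exportable fence. Assumes the default `vk` namespace,
  // enhanced mode (exceptions enabled), an already created vk::Device named `device`, and the
  // opaque-fd handle type purely as an example.
  //
  //   vk::ExportFenceCreateInfo exportInfo( vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
  //   vk::FenceCreateInfo       fenceInfo;
  //   fenceInfo.setPNext( &exportInfo );
  //   vk::Fence fence = device.createFence( fenceInfo );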

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct ExportFenceWin32HandleInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
                                                        DWORD                       dwAccess_    = {},
                                                        LPCWSTR                     name_ = {} ) VULKAN_HPP_NOEXCEPT
      : pAttributes( pAttributes_ )
      , dwAccess( dwAccess_ )
      , name( name_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportFenceWin32HandleInfoKHR( *reinterpret_cast<ExportFenceWin32HandleInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR &
                            operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

    ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttributes = pAttributes_;
      return *this;
    }

    ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      dwAccess = dwAccess_;
      return *this;
    }

    ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }

    operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR *>( this );
    }

    operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
             ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
    }

    bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::eExportFenceWin32HandleInfoKHR;
    const void *                        pNext       = {};
    const SECURITY_ATTRIBUTES *         pAttributes = {};
    DWORD                               dwAccess    = {};
    LPCWSTR                             name        = {};
  };
  static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExportFenceWin32HandleInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
  {
    using Type = ExportFenceWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  struct ExportMemoryAllocateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleTypes( handleTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMemoryAllocateInfo( *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo &
                            operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>( &rhs );
      return *this;
    }

    ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExportMemoryAllocateInfo &
      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }

    operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMemoryAllocateInfo *>( this );
    }

    operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMemoryAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMemoryAllocateInfo const & ) const = default;
#else
    bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
    }

    bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType       = StructureType::eExportMemoryAllocateInfo;
    const void *                                        pNext       = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
  };
  static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
  {
    using Type = ExportMemoryAllocateInfo;
  };
  using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
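  // Illustrative sketch (not part of the generated header): ExportMemoryAllocateInfo extends
  // MemoryAllocateInfo, so it can be linked in through a StructureChain (or a manual pNext) when
  // allocating exportable memory. Assumes the default `vk` namespace, enhanced mode, existing
  // `device`, `allocationSize`, and `memoryTypeIndex` values, and the opaque-fd handle type as an
  // example.
  //
  //   vk::StructureChain<vk::MemoryAllocateInfo, vk::ExportMemoryAllocateInfo> allocChain(
  //     vk::MemoryAllocateInfo( allocationSize, memoryTypeIndex ),
  //     vk::ExportMemoryAllocateInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );
  //   vk::DeviceMemory memory = device.allocateMemory( allocChain.get<vk::MemoryAllocateInfo>() );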

  struct ExportMemoryAllocateInfoNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleTypes( handleTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMemoryAllocateInfoNV( *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV &
                            operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>( &rhs );
      return *this;
    }

    ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExportMemoryAllocateInfoNV &
      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }

    operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMemoryAllocateInfoNV *>( this );
    }

    operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMemoryAllocateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default;
#else
    bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
    }

    bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType       = StructureType::eExportMemoryAllocateInfoNV;
    const void *                                          pNext       = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
  };
  static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
  {
    using Type = ExportMemoryAllocateInfoNV;
  };

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct ExportMemoryWin32HandleInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
                                                         DWORD                       dwAccess_    = {},
                                                         LPCWSTR                     name_ = {} ) VULKAN_HPP_NOEXCEPT
      : pAttributes( pAttributes_ )
      , dwAccess( dwAccess_ )
      , name( name_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast<ExportMemoryWin32HandleInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR &
                            operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

    ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttributes = pAttributes_;
      return *this;
    }

    ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      dwAccess = dwAccess_;
      return *this;
    }

    ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }

    operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR *>( this );
    }

    operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
             ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
    }

    bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::eExportMemoryWin32HandleInfoKHR;
    const void *                        pNext       = {};
    const SECURITY_ATTRIBUTES *         pAttributes = {};
    DWORD                               dwAccess    = {};
    LPCWSTR                             name        = {};
  };
  static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
  {
    using Type = ExportMemoryWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct ExportMemoryWin32HandleInfoNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {},
                                                        DWORD                       dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT
      : pAttributes( pAttributes_ )
      , dwAccess( dwAccess_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportMemoryWin32HandleInfoNV( *reinterpret_cast<ExportMemoryWin32HandleInfoNV const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV &
                            operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>( &rhs );
      return *this;
    }

    ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttributes = pAttributes_;
      return *this;
    }

    ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      dwAccess = dwAccess_;
      return *this;
    }

    operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV *>( this );
    }

    operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default;
#  else
    bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
             ( dwAccess == rhs.dwAccess );
    }

    bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::eExportMemoryWin32HandleInfoNV;
    const void *                        pNext       = {};
    const SECURITY_ATTRIBUTES *         pAttributes = {};
    DWORD                               dwAccess    = {};
  };
  static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
  {
    using Type = ExportMemoryWin32HandleInfoNV;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  struct ExportSemaphoreCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(
      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleTypes( handleTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo &
                            operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
      return *this;
    }

    ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExportSemaphoreCreateInfo &
      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }

    operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportSemaphoreCreateInfo *>( this );
    }

    operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportSemaphoreCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default;
#else
    bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
    }

    bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType       = StructureType::eExportSemaphoreCreateInfo;
    const void *                                           pNext       = {};
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
  };
  static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExportSemaphoreCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
  {
    using Type = ExportSemaphoreCreateInfo;
  };
  using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct ExportSemaphoreWin32HandleInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eExportSemaphoreWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {},
                                                            DWORD                       dwAccess_    = {},
                                                            LPCWSTR                     name_ = {} ) VULKAN_HPP_NOEXCEPT
      : pAttributes( pAttributes_ )
      , dwAccess( dwAccess_ )
      , name( name_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR &
                            operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

    ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttributes = pAttributes_;
      return *this;
    }

    ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      dwAccess = dwAccess_;
      return *this;
    }

    ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }

    operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR *>( this );
    }

    operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) &&
             ( dwAccess == rhs.dwAccess ) && ( name == rhs.name );
    }

    bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::eExportSemaphoreWin32HandleInfoKHR;
    const void *                        pNext       = {};
    const SECURITY_ATTRIBUTES *         pAttributes = {};
    DWORD                               dwAccess    = {};
    LPCWSTR                             name        = {};
  };
  static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExportSemaphoreWin32HandleInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
  {
    using Type = ExportSemaphoreWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  struct ExternalFormatANDROID
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT
      : externalFormat( externalFormat_ )
    {}

    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalFormatANDROID( *reinterpret_cast<ExternalFormatANDROID const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID &
                            operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>( &rhs );
      return *this;
    }

    ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      externalFormat = externalFormat_;
      return *this;
    }

    operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalFormatANDROID *>( this );
    }

    operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalFormatANDROID *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalFormatANDROID const & ) const = default;
#  else
    bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat );
    }

    bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eExternalFormatANDROID;
    void *                              pNext          = {};
    uint64_t                            externalFormat = {};
  };
  static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExternalFormatANDROID>
  {
    using Type = ExternalFormatANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

  struct ExternalImageFormatProperties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(
      VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
      : externalMemoryProperties( externalMemoryProperties_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalImageFormatProperties &
                            operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
      return *this;
    }

    operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalImageFormatProperties *>( this );
    }

    operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalImageFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalImageFormatProperties const & ) const = default;
#else
    bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( externalMemoryProperties == rhs.externalMemoryProperties );
    }

    bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType = StructureType::eExternalImageFormatProperties;
    void *                                         pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
  };
  static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalImageFormatProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
  {
    using Type = ExternalImageFormatProperties;
  };
  using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
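  // Illustrative sketch (not part of the generated header): ExternalImageFormatProperties is a
  // query output struct; it can be retrieved through the StructureChain overload of
  // PhysicalDevice::getImageFormatProperties2. Assumes the default `vk` namespace, enhanced mode,
  // and an existing vk::PhysicalDevice `physicalDevice`; the format, usage, and handle type below
  // are example values.
  //
  //   vk::PhysicalDeviceExternalImageFormatInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::PhysicalDeviceImageFormatInfo2 formatInfo( vk::Format::eR8G8B8A8Unorm,
  //                                                  vk::ImageType::e2D,
  //                                                  vk::ImageTiling::eOptimal,
  //                                                  vk::ImageUsageFlagBits::eSampled );
  //   formatInfo.setPNext( &externalInfo );
  //   auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
  //                                                         vk::ExternalImageFormatProperties>( formatInfo );
  //   vk::ExternalMemoryProperties memoryProperties =
  //     chain.get<vk::ExternalImageFormatProperties>().externalMemoryProperties;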

  struct ExternalMemoryBufferCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleTypes( handleTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo &
                            operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
      return *this;
    }

    ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExternalMemoryBufferCreateInfo &
      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }

    operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>( this );
    }

    operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default;
#else
    bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
    }

    bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType       = StructureType::eExternalMemoryBufferCreateInfo;
    const void *                                        pNext       = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
  };
  static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
  {
    using Type = ExternalMemoryBufferCreateInfo;
  };
  using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;

  struct ExternalMemoryImageCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleTypes( handleTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo &
                            operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
      return *this;
    }

    ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExternalMemoryImageCreateInfo &
      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }

    operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo *>( this );
    }

    operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryImageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default;
#else
    bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
    }

    bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType       = StructureType::eExternalMemoryImageCreateInfo;
    const void *                                        pNext       = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
  };
  static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
  {
    using Type = ExternalMemoryImageCreateInfo;
  };
  using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
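  // Illustrative sketch (not part of the generated header): ExternalMemoryImageCreateInfo is
  // chained into ImageCreateInfo::pNext so the image can later be bound to exportable or
  // importable memory (compare ExportMemoryAllocateInfo above). Assumes the default `vk`
  // namespace, enhanced mode, existing `device` and `imageInfo` (vk::ImageCreateInfo) values, and
  // the opaque-fd handle type as an example.
  //
  //   vk::ExternalMemoryImageCreateInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   imageInfo.setPNext( &externalInfo );
  //   vk::Image image = device.createImage( imageInfo );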

  struct ExternalMemoryImageCreateInfoNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleTypes( handleTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ExternalMemoryImageCreateInfoNV( *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV &
                            operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>( &rhs );
      return *this;
    }

    ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ExternalMemoryImageCreateInfoNV &
      setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      handleTypes = handleTypes_;
      return *this;
    }

    operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV *>( this );
    }

    operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default;
#else
    bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes );
    }

    bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType       = StructureType::eExternalMemoryImageCreateInfoNV;
    const void *                                          pNext       = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
  };
  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
  {
    using Type = ExternalMemoryImageCreateInfoNV;
  };

  struct FilterCubicImageViewImageFormatPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 filterCubic_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT
      : filterCubic( filterCubic_ )
      , filterCubicMinmax( filterCubicMinmax_ )
    {}

    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(
      FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : FilterCubicImageViewImageFormatPropertiesEXT(
          *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FilterCubicImageViewImageFormatPropertiesEXT &
                            operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FilterCubicImageViewImageFormatPropertiesEXT &
      operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
    }

    operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default;
#else
    bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) &&
             ( filterCubicMinmax == rhs.filterCubicMinmax );
    }

    bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
    void *                              pNext       = {};
    VULKAN_HPP_NAMESPACE::Bool32        filterCubic = {};
    VULKAN_HPP_NAMESPACE::Bool32        filterCubicMinmax = {};
  };
  static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) ==
                   sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FilterCubicImageViewImageFormatPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
  {
    using Type = FilterCubicImageViewImageFormatPropertiesEXT;
  };

  struct FragmentShadingRateAttachmentInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eFragmentShadingRateAttachmentInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {},
      VULKAN_HPP_NAMESPACE::Extent2D                     shadingRateAttachmentTexelSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ )
      , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
    {}

    VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR &
                            operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FragmentShadingRateAttachmentInfoKHR &
      operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
      return *this;
    }

    FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment(
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
      return *this;
    }

    FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize(
      VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
      return *this;
    }

    operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR *>( this );
    }

    operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default;
#else
    bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) &&
             ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
    }

    bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
    const void *                                       pNext = {};
    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {};
    VULKAN_HPP_NAMESPACE::Extent2D                     shadingRateAttachmentTexelSize = {};
  };
  static_assert( sizeof( FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FragmentShadingRateAttachmentInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
  {
    using Type = FragmentShadingRateAttachmentInfoKHR;
  };

  struct FramebufferAttachmentImageInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_           = {},
                                      VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage_           = {},
                                      uint32_t                               width_           = {},
                                      uint32_t                               height_          = {},
                                      uint32_t                               layerCount_      = {},
                                      uint32_t                               viewFormatCount_ = {},
                                      const VULKAN_HPP_NAMESPACE::Format *   pViewFormats_    = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , usage( usage_ )
      , width( width_ )
      , height( height_ )
      , layerCount( layerCount_ )
      , viewFormatCount( viewFormatCount_ )
      , pViewFormats( pViewFormats_ )
    {}

    VULKAN_HPP_CONSTEXPR
      FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : FramebufferAttachmentImageInfo( *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferAttachmentImageInfo(
      VULKAN_HPP_NAMESPACE::ImageCreateFlags                                                    flags_,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags                                                     usage_,
      uint32_t                                                                                  width_,
      uint32_t                                                                                  height_,
      uint32_t                                                                                  layerCount_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
      : flags( flags_ )
      , usage( usage_ )
      , width( width_ )
      , height( height_ )
      , layerCount( layerCount_ )
      , viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) )
      , pViewFormats( viewFormats_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo &
                            operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>( &rhs );
      return *this;
    }

    FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      layerCount = layerCount_;
      return *this;
    }

    FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewFormatCount = viewFormatCount_;
      return *this;
    }

    FramebufferAttachmentImageInfo &
      setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewFormats = pViewFormats_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferAttachmentImageInfo & setViewFormats(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
      VULKAN_HPP_NOEXCEPT
    {
      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
      pViewFormats    = viewFormats_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFramebufferAttachmentImageInfo *>( this );
    }

    operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFramebufferAttachmentImageInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default;
#else
    bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) &&
             ( width == rhs.width ) && ( height == rhs.height ) && ( layerCount == rhs.layerCount ) &&
             ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats );
    }

    bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType           = StructureType::eFramebufferAttachmentImageInfo;
    const void *                           pNext           = {};
    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags           = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage           = {};
    uint32_t                               width           = {};
    uint32_t                               height          = {};
    uint32_t                               layerCount      = {};
    uint32_t                               viewFormatCount = {};
    const VULKAN_HPP_NAMESPACE::Format *   pViewFormats    = {};
  };
  static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FramebufferAttachmentImageInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
  {
    using Type = FramebufferAttachmentImageInfo;
  };
  using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
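  // Illustrative sketch (not part of the generated header): in enhanced mode, the
  // ArrayProxyNoTemporaries overload setViewFormats fills viewFormatCount and pViewFormats
  // together, e.g. when describing an attachment of an imageless framebuffer. Assumes the default
  // `vk` namespace and an existing `extent` (vk::Extent2D); the format and usage are example
  // values.
  //
  //   std::array<vk::Format, 1> viewFormats = { vk::Format::eB8G8R8A8Unorm };
  //   vk::FramebufferAttachmentImageInfo attachmentInfo;
  //   attachmentInfo.setUsage( vk::ImageUsageFlagBits::eColorAttachment )
  //     .setWidth( extent.width )
  //     .setHeight( extent.height )
  //     .setLayerCount( 1 )
  //     .setViewFormats( viewFormats );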

  struct FramebufferAttachmentsCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(
      uint32_t                                                     attachmentImageInfoCount_ = {},
      const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_    = {} ) VULKAN_HPP_NOEXCEPT
      : attachmentImageInfoCount( attachmentImageInfoCount_ )
      , pAttachmentImageInfos( pAttachmentImageInfos_ )
    {}

    VULKAN_HPP_CONSTEXPR
      FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : FramebufferAttachmentsCreateInfo( *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferAttachmentsCreateInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const &
        attachmentImageInfos_ )
      : attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) )
      , pAttachmentImageInfos( attachmentImageInfos_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo &
                            operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>( &rhs );
      return *this;
    }

    FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    FramebufferAttachmentsCreateInfo &
      setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentImageInfoCount = attachmentImageInfoCount_;
      return *this;
    }

    FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos(
      const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachmentImageInfos = pAttachmentImageInfos_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    FramebufferAttachmentsCreateInfo & setAttachmentImageInfos(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const &
        attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
      pAttachmentImageInfos    = attachmentImageInfos_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo *>( this );
    }

    operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default;
#else
    bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) &&
             ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
    }

    bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                    = StructureType::eFramebufferAttachmentsCreateInfo;
    const void *                        pNext                    = {};
    uint32_t                            attachmentImageInfoCount = {};
    const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {};
  };
  static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
  {
    using Type = FramebufferAttachmentsCreateInfo;
  };
  using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
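  // Usage sketch (illustrative only; attachmentImageInfos and framebufferCreateInfo are hypothetical):
  // FramebufferAttachmentsCreateInfo is chained into FramebufferCreateInfo::pNext when the framebuffer
  // is created with FramebufferCreateFlagBits::eImageless, so attachment properties are declared up
  // front and the actual image views are supplied later when the render pass begins, e.g.
  //
  //   VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo attachmentsInfo( attachmentImageInfos );
  //   framebufferCreateInfo.flags = VULKAN_HPP_NAMESPACE::FramebufferCreateFlagBits::eImageless;
  //   framebufferCreateInfo.pNext = &attachmentsInfo;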

  struct GraphicsShaderGroupCreateInfoNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(
      uint32_t                                                          stageCount_         = {},
      const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo *       pStages_            = {},
      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *  pVertexInputState_  = {},
      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} ) VULKAN_HPP_NOEXCEPT
      : stageCount( stageCount_ )
      , pStages( pStages_ )
      , pVertexInputState( pVertexInputState_ )
      , pTessellationState( pTessellationState_ )
    {}

    VULKAN_HPP_CONSTEXPR
      GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast<GraphicsShaderGroupCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GraphicsShaderGroupCreateInfoNV(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
                                                                        stages_,
      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *  pVertexInputState_  = {},
      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {} )
      : stageCount( static_cast<uint32_t>( stages_.size() ) )
      , pStages( stages_.data() )
      , pVertexInputState( pVertexInputState_ )
      , pTessellationState( pTessellationState_ )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV &
                            operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const *>( &rhs );
      return *this;
    }

    GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = stageCount_;
      return *this;
    }

    GraphicsShaderGroupCreateInfoNV &
      setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT
    {
      pStages = pStages_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GraphicsShaderGroupCreateInfoNV & setStages(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const &
        stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast<uint32_t>( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GraphicsShaderGroupCreateInfoNV & setPVertexInputState(
      const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
    {
      pVertexInputState = pVertexInputState_;
      return *this;
    }

    GraphicsShaderGroupCreateInfoNV & setPTessellationState(
      const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT
    {
      pTessellationState = pTessellationState_;
      return *this;
    }

    operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV *>( this );
    }

    operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default;
#else
    bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) &&
             ( pStages == rhs.pStages ) && ( pVertexInputState == rhs.pVertexInputState ) &&
             ( pTessellationState == rhs.pTessellationState );
    }

    bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                         sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
    const void *                                                pNext = {};
    uint32_t                                                    stageCount               = {};
    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages                  = {};
    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo *  pVertexInputState  = {};
    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {};
  };
  static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<GraphicsShaderGroupCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
  {
    using Type = GraphicsShaderGroupCreateInfoNV;
  };
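  // Note (informational): GraphicsShaderGroupCreateInfoNV belongs to VK_NV_device_generated_commands
  // and describes one shader group of a graphics pipeline; it is not passed to a command on its own
  // but consumed through the pGroups array of GraphicsPipelineShaderGroupsCreateInfoNV below.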

  struct GraphicsPipelineShaderGroupsCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(
      uint32_t                                                      groupCount_    = {},
      const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_       = {},
      uint32_t                                                      pipelineCount_ = {},
      const VULKAN_HPP_NAMESPACE::Pipeline *                        pPipelines_    = {} ) VULKAN_HPP_NOEXCEPT
      : groupCount( groupCount_ )
      , pGroups( pGroups_ )
      , pipelineCount( pipelineCount_ )
      , pPipelines( pPipelines_ )
    {}

    VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(
      GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : GraphicsPipelineShaderGroupsCreateInfoNV(
          *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GraphicsPipelineShaderGroupsCreateInfoNV(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const &
                                                                                                  groups_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {} )
      : groupCount( static_cast<uint32_t>( groups_.size() ) )
      , pGroups( groups_.data() )
      , pipelineCount( static_cast<uint32_t>( pipelines_.size() ) )
      , pPipelines( pipelines_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV &
                            operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    GraphicsPipelineShaderGroupsCreateInfoNV &
      operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
      return *this;
    }

    GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = groupCount_;
      return *this;
    }

    GraphicsPipelineShaderGroupsCreateInfoNV &
      setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT
    {
      pGroups = pGroups_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GraphicsPipelineShaderGroupsCreateInfoNV & setGroups(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const &
        groups_ ) VULKAN_HPP_NOEXCEPT
    {
      groupCount = static_cast<uint32_t>( groups_.size() );
      pGroups    = groups_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCount = pipelineCount_;
      return *this;
    }

    GraphicsPipelineShaderGroupsCreateInfoNV &
      setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelines = pPipelines_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ )
      VULKAN_HPP_NOEXCEPT
    {
      pipelineCount = static_cast<uint32_t>( pipelines_.size() );
      pPipelines    = pipelines_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
    }

    operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default;
#else
    bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) &&
             ( pGroups == rhs.pGroups ) && ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines );
    }

    bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
    const void *                        pNext      = {};
    uint32_t                            groupCount = {};
    const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups       = {};
    uint32_t                                                      pipelineCount = {};
    const VULKAN_HPP_NAMESPACE::Pipeline *                        pPipelines    = {};
  };
  static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) ==
                   sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<GraphicsPipelineShaderGroupsCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
  {
    using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
  };
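  // Usage sketch (illustrative only; shaderGroups and graphicsPipelineCreateInfo are hypothetical):
  // GraphicsPipelineShaderGroupsCreateInfoNV is chained into GraphicsPipelineCreateInfo::pNext so the
  // resulting pipeline exposes shader groups that device-generated commands can later select, e.g.
  //
  //   VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV shaderGroupsInfo( shaderGroups );
  //   graphicsPipelineCreateInfo.pNext = &shaderGroupsInfo;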

  struct HeadlessSurfaceCreateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} )
      VULKAN_HPP_NOEXCEPT : flags( flags_ )
    {}

    VULKAN_HPP_CONSTEXPR
      HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT &
                            operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
      return *this;
    }

    HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    HeadlessSurfaceCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( this );
    }

    operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default;
#else
    bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
    }

    bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
    const void *                                        pNext = {};
    VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
  };
  static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<HeadlessSurfaceCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
  {
    using Type = HeadlessSurfaceCreateInfoEXT;
  };
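  // Usage sketch (illustrative only; instance is hypothetical): HeadlessSurfaceCreateInfoEXT carries no
  // data beyond its reserved flags and is handed to Instance::createHeadlessSurfaceEXT
  // (VK_EXT_headless_surface) to obtain a surface that is not backed by any window system, e.g.
  //
  //   VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT headlessCreateInfo;
  //   VULKAN_HPP_NAMESPACE::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( headlessCreateInfo );  // enhanced mode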

#if defined( VK_USE_PLATFORM_IOS_MVK )
  struct IOSSurfaceCreateInfoMVK
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {},
                                                  const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pView( pView_ )
    {}

    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
      : IOSSurfaceCreateInfoMVK( *reinterpret_cast<IOSSurfaceCreateInfoMVK const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK &
                            operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>( &rhs );
      return *this;
    }

    IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
    {
      pView = pView_;
      return *this;
    }

    operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( this );
    }

    operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default;
#  else
    bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView );
    }

    bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType = StructureType::eIosSurfaceCreateInfoMVK;
    const void *                                   pNext = {};
    VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
    const void *                                   pView = {};
  };
  static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
  {
    using Type = IOSSurfaceCreateInfoMVK;
  };
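  // Note (informational): IOSSurfaceCreateInfoMVK belongs to VK_MVK_ios_surface and is consumed by
  // Instance::createIOSSurfaceMVK; pView is expected to reference the UIView or CAMetalLayer backing
  // the surface, which is why this struct only exists under the VK_USE_PLATFORM_IOS_MVK guard.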
#endif /*VK_USE_PLATFORM_IOS_MVK*/

  struct ImageDrmFormatModifierExplicitCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(
      uint64_t                                        drmFormatModifier_           = {},
      uint32_t                                        drmFormatModifierPlaneCount_ = {},
      const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_               = {} ) VULKAN_HPP_NOEXCEPT
      : drmFormatModifier( drmFormatModifier_ )
      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
      , pPlaneLayouts( pPlaneLayouts_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(
      ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : ImageDrmFormatModifierExplicitCreateInfoEXT(
          *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ImageDrmFormatModifierExplicitCreateInfoEXT(
      uint64_t drmFormatModifier_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const &
        planeLayouts_ )
      : drmFormatModifier( drmFormatModifier_ )
      , drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) )
      , pPlaneLayouts( planeLayouts_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT &
                            operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageDrmFormatModifierExplicitCreateInfoEXT &
      operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
      return *this;
    }

    ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageDrmFormatModifierExplicitCreateInfoEXT &
      setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifier = drmFormatModifier_;
      return *this;
    }

    ImageDrmFormatModifierExplicitCreateInfoEXT &
      setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
      return *this;
    }

    ImageDrmFormatModifierExplicitCreateInfoEXT &
      setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pPlaneLayouts = pPlaneLayouts_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const &
        planeLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
      pPlaneLayouts               = planeLayouts_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
    }

    operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default;
#else
    bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) &&
             ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) &&
             ( pPlaneLayouts == rhs.pPlaneLayouts );
    }

    bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType             sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
    const void *                                    pNext = {};
    uint64_t                                        drmFormatModifier           = {};
    uint32_t                                        drmFormatModifierPlaneCount = {};
    const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts               = {};
  };
  static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) ==
                   sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageDrmFormatModifierExplicitCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
  {
    using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
  };
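  // Usage sketch (illustrative only; chosenModifier, planeLayouts and imageCreateInfo are hypothetical):
  // ImageDrmFormatModifierExplicitCreateInfoEXT is chained into ImageCreateInfo::pNext when the image
  // uses ImageTiling::eDrmFormatModifierEXT and the application pins one specific modifier together
  // with explicit per-plane layouts, e.g.
  //
  //   VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT explicitModifierInfo(
  //     chosenModifier, planeLayouts );  // enhanced-mode constructor taking an ArrayProxy of SubresourceLayout
  //   imageCreateInfo.tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eDrmFormatModifierEXT;
  //   imageCreateInfo.pNext  = &explicitModifierInfo;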

  struct ImageDrmFormatModifierListCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eImageDrmFormatModifierListCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageDrmFormatModifierListCreateInfoEXT( uint32_t         drmFormatModifierCount_ = {},
                                               const uint64_t * pDrmFormatModifiers_    = {} ) VULKAN_HPP_NOEXCEPT
      : drmFormatModifierCount( drmFormatModifierCount_ )
      , pDrmFormatModifiers( pDrmFormatModifiers_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageDrmFormatModifierListCreateInfoEXT(
          *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ImageDrmFormatModifierListCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ )
      : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) )
      , pDrmFormatModifiers( drmFormatModifiers_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT &
                            operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageDrmFormatModifierListCreateInfoEXT &
      operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
      return *this;
    }

    ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageDrmFormatModifierListCreateInfoEXT &
      setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifierCount = drmFormatModifierCount_;
      return *this;
    }

    ImageDrmFormatModifierListCreateInfoEXT &
      setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
    {
      pDrmFormatModifiers = pDrmFormatModifiers_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
      pDrmFormatModifiers    = drmFormatModifiers_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT *>( this );
    }

    operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default;
#else
    bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( drmFormatModifierCount == rhs.drmFormatModifierCount ) &&
             ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
    }

    bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
    const void *                        pNext = {};
    uint32_t                            drmFormatModifierCount = {};
    const uint64_t *                    pDrmFormatModifiers    = {};
  };
  static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) ==
                   sizeof( VkImageDrmFormatModifierListCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageDrmFormatModifierListCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
  {
    using Type = ImageDrmFormatModifierListCreateInfoEXT;
  };
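  // Usage sketch (illustrative only; modifierA, modifierB and imageCreateInfo are hypothetical):
  // ImageDrmFormatModifierListCreateInfoEXT is the list-based counterpart of the explicit variant
  // above; it offers the implementation a set of acceptable DRM format modifiers and lets it choose
  // one when the image is created with ImageTiling::eDrmFormatModifierEXT, e.g.
  //
  //   std::array<uint64_t, 2> acceptableModifiers = { modifierA, modifierB };
  //   VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT modifierListInfo( acceptableModifiers );
  //   imageCreateInfo.tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eDrmFormatModifierEXT;
  //   imageCreateInfo.pNext  = &modifierListInfo;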

  struct ImageFormatListCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageFormatListCreateInfo( uint32_t                             viewFormatCount_ = {},
                                 const VULKAN_HPP_NAMESPACE::Format * pViewFormats_    = {} ) VULKAN_HPP_NOEXCEPT
      : viewFormatCount( viewFormatCount_ )
      , pViewFormats( pViewFormats_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageFormatListCreateInfo( *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ImageFormatListCreateInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
      : viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo &
                            operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>( &rhs );
      return *this;
    }

    ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewFormatCount = viewFormatCount_;
      return *this;
    }

    ImageFormatListCreateInfo &
      setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewFormats = pViewFormats_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ImageFormatListCreateInfo & setViewFormats(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
      VULKAN_HPP_NOEXCEPT
    {
      viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
      pViewFormats    = viewFormats_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageFormatListCreateInfo *>( this );
    }

    operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageFormatListCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageFormatListCreateInfo const & ) const = default;
#else
    bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) &&
             ( pViewFormats == rhs.pViewFormats );
    }

    bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType  sType           = StructureType::eImageFormatListCreateInfo;
    const void *                         pNext           = {};
    uint32_t                             viewFormatCount = {};
    const VULKAN_HPP_NAMESPACE::Format * pViewFormats    = {};
  };
  static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageFormatListCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
  {
    using Type = ImageFormatListCreateInfo;
  };
  using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
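  // Usage sketch (illustrative only; imageCreateInfo is hypothetical): ImageFormatListCreateInfo is
  // chained into ImageCreateInfo::pNext, typically together with ImageCreateFlagBits::eMutableFormat,
  // to tell the implementation up front which formats views of the image may use, e.g.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::Format, 2> viewFormats = { VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm,
  //                                                               VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb };
  //   VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo formatListInfo( viewFormats );  // enhanced-mode constructor
  //   imageCreateInfo.pNext = &formatListInfo;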

#if defined( VK_USE_PLATFORM_FUCHSIA )
  struct ImagePipeSurfaceCreateInfoFUCHSIA
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {},
                                         zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , imagePipeHandle( imagePipeHandle_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast<ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA &
                            operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs );
      return *this;
    }

    ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImagePipeSurfaceCreateInfoFUCHSIA &
      setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
    {
      imagePipeHandle = imagePipeHandle_;
      return *this;
    }

    operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
    }

    operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & ) const = default;
#  else
    bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 );
    }

    bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                      sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
    const void *                                             pNext = {};
    VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
    zx_handle_t                                              imagePipeHandle = {};
  };
  static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImagePipeSurfaceCreateInfoFUCHSIA>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
  {
    using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
  };
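  // Note (informational): ImagePipeSurfaceCreateInfoFUCHSIA belongs to VK_FUCHSIA_imagepipe_surface and
  // is consumed by Instance::createImagePipeSurfaceFUCHSIA; imagePipeHandle is the zx_handle_t of the
  // Fuchsia ImagePipe the surface presents to, hence the VK_USE_PLATFORM_FUCHSIA guard.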
#endif /*VK_USE_PLATFORM_FUCHSIA*/

  struct ImagePlaneMemoryRequirementsInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ =
                                          VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
      : planeAspect( planeAspect_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo &
                            operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>( &rhs );
      return *this;
    }

    ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImagePlaneMemoryRequirementsInfo &
      setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
    {
      planeAspect = planeAspect_;
      return *this;
    }

    operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo *>( this );
    }

    operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default;
#else
    bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect );
    }

    bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType       = StructureType::eImagePlaneMemoryRequirementsInfo;
    const void *                              pNext       = {};
    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
  };
  static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImagePlaneMemoryRequirementsInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
  {
    using Type = ImagePlaneMemoryRequirementsInfo;
  };
  using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
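  // Usage sketch (illustrative only; image and device are hypothetical): for a disjoint multi-planar
  // image, ImagePlaneMemoryRequirementsInfo is chained into ImageMemoryRequirementsInfo2::pNext to
  // query the memory requirements of a single plane, e.g.
  //
  //   VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo planeInfo(
  //     VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::ePlane1 );
  //   VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 requirementsInfo( image );
  //   requirementsInfo.pNext = &planeInfo;
  //   VULKAN_HPP_NAMESPACE::MemoryRequirements2 requirements = device.getImageMemoryRequirements2( requirementsInfo );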

  struct ImageStencilUsageCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT
      : stencilUsage( stencilUsage_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageStencilUsageCreateInfo( *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo &
                            operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
      return *this;
    }

    ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageStencilUsageCreateInfo &
      setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilUsage = stencilUsage_;
      return *this;
    }

    operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageStencilUsageCreateInfo *>( this );
    }

    operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageStencilUsageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default;
#else
    bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage );
    }

    bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType        = StructureType::eImageStencilUsageCreateInfo;
    const void *                          pNext        = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
  };
  static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageStencilUsageCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
  {
    using Type = ImageStencilUsageCreateInfo;
  };
  using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
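  // Usage sketch (illustrative only; imageCreateInfo is hypothetical): ImageStencilUsageCreateInfo is
  // chained into ImageCreateInfo::pNext to give the stencil aspect of a depth/stencil image its own
  // usage set, independent of the usage declared for the image as a whole, e.g.
  //
  //   VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo stencilUsageInfo(
  //     VULKAN_HPP_NAMESPACE::ImageUsageFlagBits::eDepthStencilAttachment );
  //   imageCreateInfo.pNext = &stencilUsageInfo;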

  struct ImageSwapchainCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT
      : swapchain( swapchain_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR &
                            operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs );
      return *this;
    }

    ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchain = swapchain_;
      return *this;
    }

    operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR *>( this );
    }

    operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageSwapchainCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default;
#else
    bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain );
    }

    bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::eImageSwapchainCreateInfoKHR;
    const void *                        pNext     = {};
    VULKAN_HPP_NAMESPACE::SwapchainKHR  swapchain = {};
  };
  static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageSwapchainCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
  {
    using Type = ImageSwapchainCreateInfoKHR;
  };
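  // Usage sketch (illustrative only; swapchain and imageCreateInfo are hypothetical):
  // ImageSwapchainCreateInfoKHR is chained into ImageCreateInfo::pNext to create an image that must
  // later be bound to memory of the given swapchain's presentable images (via
  // BindImageMemorySwapchainInfoKHR), as used with device groups, e.g.
  //
  //   VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR imageSwapchainInfo( swapchain );
  //   imageCreateInfo.pNext = &imageSwapchainInfo;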

  struct ImageViewASTCDecodeModeEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(
      VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
      : decodeMode( decodeMode_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT &
                            operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
      return *this;
    }

    ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
    {
      decodeMode = decodeMode_;
      return *this;
    }

    operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT *>( this );
    }

    operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewASTCDecodeModeEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default;
#else
    bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode );
    }

    bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::eImageViewAstcDecodeModeEXT;
    const void *                        pNext      = {};
    VULKAN_HPP_NAMESPACE::Format        decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  };
  static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
  {
    using Type = ImageViewASTCDecodeModeEXT;
  };
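  // Usage sketch (illustrative only; imageViewCreateInfo is hypothetical): ImageViewASTCDecodeModeEXT
  // comes from VK_EXT_astc_decode_mode and is chained into ImageViewCreateInfo::pNext to request a
  // specific internal decode precision for a view of an ASTC-compressed image, e.g.
  //
  //   VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT decodeModeInfo(
  //     VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm );
  //   imageViewCreateInfo.pNext = &decodeModeInfo;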

  struct ImageViewUsageCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT
      : usage( usage_ )
    {}

    VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImageViewUsageCreateInfo( *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo &
                            operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
      return *this;
    }

    ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
    {
      usage = usage_;
      return *this;
    }

    operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImageViewUsageCreateInfo *>( this );
    }

    operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImageViewUsageCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImageViewUsageCreateInfo const & ) const = default;
#else
    bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage );
    }

    bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType = StructureType::eImageViewUsageCreateInfo;
    const void *                          pNext = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
  };
  static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
  {
    using Type = ImageViewUsageCreateInfo;
  };
  using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
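
  // Usage sketch (illustrative only): ImageViewUsageCreateInfo is chained into an ImageViewCreateInfo to
  // restrict the view to a subset of the image's usage flags, e.g. when the view format does not support every
  // usage the image was created with. Assuming an existing "viewInfo" as in the sketch above:
  //
  //   vk::ImageViewUsageCreateInfo usageInfo( vk::ImageUsageFlagBits::eSampled );
  //   viewInfo.pNext = &usageInfo;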

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  struct ImportAndroidHardwareBufferInfoANDROID
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eImportAndroidHardwareBufferInfoANDROID;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {} ) VULKAN_HPP_NOEXCEPT
      : buffer( buffer_ )
    {}

    VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportAndroidHardwareBufferInfoANDROID(
          *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID &
                            operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportAndroidHardwareBufferInfoANDROID &
      operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
      return *this;
    }

    ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID *>( this );
    }

    operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default;
#  else
    bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer );
    }

    bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eImportAndroidHardwareBufferInfoANDROID;
    const void *                        pNext  = {};
    struct AHardwareBuffer *            buffer = {};
  };
  static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportAndroidHardwareBufferInfoANDROID>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
  {
    using Type = ImportAndroidHardwareBufferInfoANDROID;
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
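
  // Usage sketch (illustrative only, requires VK_USE_PLATFORM_ANDROID_KHR): ImportAndroidHardwareBufferInfoANDROID
  // is chained into a MemoryAllocateInfo to import an AHardwareBuffer; when the buffer holds image data the chain
  // typically also carries a MemoryDedicatedAllocateInfo. Assuming "device", "image" and "ahb" (an AHardwareBuffer*)
  // exist, and "allocationSize"/"memoryTypeIndex" come from getAndroidHardwareBufferPropertiesANDROID:
  //
  //   vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryDedicatedAllocateInfo,
  //                      vk::ImportAndroidHardwareBufferInfoANDROID>
  //     chain( { allocationSize, memoryTypeIndex }, { image }, { ahb } );
  //   vk::DeviceMemory memory = device.allocateMemory( chain.get<vk::MemoryAllocateInfo>() );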

  struct ImportMemoryFdInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
                                                  VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                                int fd_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
      , fd( fd_ )
    {}

    VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR &
                            operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
      return *this;
    }

    ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportMemoryFdInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
    {
      fd = fd_;
      return *this;
    }

    operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryFdInfoKHR *>( this );
    }

    operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryFdInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default;
#else
    bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd );
    }

    bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::eImportMemoryFdInfoKHR;
    const void *                                           pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    int fd = {};
  };
  static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
  {
    using Type = ImportMemoryFdInfoKHR;
  };
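
  // Usage sketch (illustrative only): ImportMemoryFdInfoKHR extends MemoryAllocateInfo to import an external
  // POSIX file descriptor, for example one exported with Device::getMemoryFdKHR; on success, ownership of the
  // opaque fd passes to the implementation. Assuming "fd", "allocationSize" and "memoryTypeIndex" are known:
  //
  //   vk::ImportMemoryFdInfoKHR importInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd );
  //   vk::MemoryAllocateInfo    allocInfo( allocationSize, memoryTypeIndex );
  //   allocInfo.pNext         = &importInfo;
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );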

  struct ImportMemoryHostPointerInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
                                        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                      void * pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
      , pHostPointer( pHostPointer_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryHostPointerInfoEXT( *reinterpret_cast<ImportMemoryHostPointerInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT &
                            operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>( &rhs );
      return *this;
    }

    ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportMemoryHostPointerInfoEXT &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT
    {
      pHostPointer = pHostPointer_;
      return *this;
    }

    operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT *>( this );
    }

    operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default;
#else
    bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
             ( pHostPointer == rhs.pHostPointer );
    }

    bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::eImportMemoryHostPointerInfoEXT;
    const void *                                           pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    void * pHostPointer = {};
  };
  static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportMemoryHostPointerInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
  {
    using Type = ImportMemoryHostPointerInfoEXT;
  };
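
  // Usage sketch (illustrative only): ImportMemoryHostPointerInfoEXT (VK_EXT_external_memory_host) imports
  // host-allocated memory; the pointer and size must respect minImportedHostPointerAlignment, and the memory
  // type is taken from Device::getMemoryHostPointerPropertiesEXT. Assuming "hostPtr" and "size" satisfy that:
  //
  //   vk::ImportMemoryHostPointerInfoEXT importInfo(
  //     vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );
  //   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
  //   allocInfo.pNext         = &importInfo;
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );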

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct ImportMemoryWin32HandleInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
                                        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                      HANDLE  handle_ = {},
                                      LPCWSTR name_   = {} ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
      , handle( handle_ )
      , name( name_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast<ImportMemoryWin32HandleInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR &
                            operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>( &rhs );
      return *this;
    }

    ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportMemoryWin32HandleInfoKHR &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }

    ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
    {
      name = name_;
      return *this;
    }

    operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR *>( this );
    }

    operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default;
#  else
    bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
             ( handle == rhs.handle ) && ( name == rhs.name );
    }

    bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::eImportMemoryWin32HandleInfoKHR;
    const void *                                           pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    HANDLE  handle = {};
    LPCWSTR name   = {};
  };
  static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoKHR>
  {
    using Type = ImportMemoryWin32HandleInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
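
  // Usage sketch (illustrative only, requires VK_USE_PLATFORM_WIN32_KHR): ImportMemoryWin32HandleInfoKHR extends
  // MemoryAllocateInfo to import memory through a Win32 HANDLE or a name (only one of the two is used). Assuming
  // a "handle" exported elsewhere, e.g. with Device::getMemoryWin32HandleKHR:
  //
  //   vk::ImportMemoryWin32HandleInfoKHR importInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32, handle );
  //   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
  //   allocInfo.pNext         = &importInfo;
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );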

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct ImportMemoryWin32HandleInfoNV
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {},
                                     HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
      , handle( handle_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryWin32HandleInfoNV( *reinterpret_cast<ImportMemoryWin32HandleInfoNV const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV &
                            operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>( &rhs );
      return *this;
    }

    ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportMemoryWin32HandleInfoNV &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }

    operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV *>( this );
    }

    operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default;
#  else
    bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
             ( handle == rhs.handle );
    }

    bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType      = StructureType::eImportMemoryWin32HandleInfoNV;
    const void *                                          pNext      = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
    HANDLE                                                handle     = {};
  };
  static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
  {
    using Type = ImportMemoryWin32HandleInfoNV;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
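
  // Usage sketch (illustrative only, requires VK_USE_PLATFORM_WIN32_KHR): ImportMemoryWin32HandleInfoNV is the
  // older NV_external_memory_win32 variant of the structure above; it is likewise chained into a
  // MemoryAllocateInfo, but takes ExternalMemoryHandleTypeFlagsNV and a HANDLE retrieved with
  // Device::getMemoryWin32HandleNV:
  //
  //   vk::ImportMemoryWin32HandleInfoNV importInfo( vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32, handle );
  //   allocInfo.pNext = &importInfo;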

#if defined( VK_USE_PLATFORM_FUCHSIA )
  struct ImportMemoryZirconHandleInfoFUCHSIA
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eImportMemoryZirconHandleInfoFUCHSIA;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ImportMemoryZirconHandleInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
                                             VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd,
                                           zx_handle_t handle_ = {} ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
      , handle( handle_ )
    {}

    VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
      : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast<ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA &
                            operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ImportMemoryZirconHandleInfoFUCHSIA &
      operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA const *>( &rhs );
      return *this;
    }

    ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ImportMemoryZirconHandleInfoFUCHSIA &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT
    {
      handle = handle_;
      return *this;
    }

    operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
    }

    operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkImportMemoryZirconHandleInfoFUCHSIA *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & ) const = default;
#  else
    bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) &&
             ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 );
    }

    bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                    sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA;
    const void *                                           pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    zx_handle_t handle = {};
  };
  static_assert( sizeof( ImportMemoryZirconHandleInfoFUCHSIA ) == sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ImportMemoryZirconHandleInfoFUCHSIA>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eImportMemoryZirconHandleInfoFUCHSIA>
  {
    using Type = ImportMemoryZirconHandleInfoFUCHSIA;
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/
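
  // Usage sketch (illustrative only, requires VK_USE_PLATFORM_FUCHSIA): ImportMemoryZirconHandleInfoFUCHSIA
  // imports a Zircon VMO handle (e.g. one exported with Device::getMemoryZirconHandleFUCHSIA) into a
  // MemoryAllocateInfo chain:
  //
  //   vk::ImportMemoryZirconHandleInfoFUCHSIA importInfo(
  //     vk::ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA, vmoHandle );
  //   allocInfo.pNext = &importInfo;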

  struct InputAttachmentAspectReference
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      InputAttachmentAspectReference( uint32_t                               subpass_              = {},
                                      uint32_t                               inputAttachmentIndex_ = {},
                                      VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : subpass( subpass_ )
      , inputAttachmentIndex( inputAttachmentIndex_ )
      , aspectMask( aspectMask_ )
    {}

    VULKAN_HPP_CONSTEXPR
      InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
      : InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference &
                            operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
      return *this;
    }

    InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
    {
      subpass = subpass_;
      return *this;
    }

    InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      inputAttachmentIndex = inputAttachmentIndex_;
      return *this;
    }

    InputAttachmentAspectReference &
      setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectMask = aspectMask_;
      return *this;
    }

    operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInputAttachmentAspectReference *>( this );
    }

    operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInputAttachmentAspectReference *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( InputAttachmentAspectReference const & ) const = default;
#else
    bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) &&
             ( aspectMask == rhs.aspectMask );
    }

    bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                               subpass              = {};
    uint32_t                               inputAttachmentIndex = {};
    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask           = {};
  };
  static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<InputAttachmentAspectReference>::value,
                 "struct wrapper is not a standard layout!" );
  using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
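
  // Usage sketch (illustrative only): InputAttachmentAspectReference entries are consumed by
  // RenderPassInputAttachmentAspectCreateInfo, which is in turn chained into a RenderPassCreateInfo to state
  // which aspects of each input attachment a subpass may read. Assuming an existing "renderPassCreateInfo":
  //
  //   vk::InputAttachmentAspectReference aspectRef( 0 /*subpass*/, 0 /*input attachment*/,
  //                                                 vk::ImageAspectFlagBits::eColor );
  //   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectRef );
  //   renderPassCreateInfo.pNext = &aspectInfo;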

  struct InstanceCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags     flags_                 = {},
                                             const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_      = {},
                                             uint32_t                                      enabledLayerCount_     = {},
                                             const char * const *                          ppEnabledLayerNames_   = {},
                                             uint32_t                                      enabledExtensionCount_ = {},
                                             const char * const * ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pApplicationInfo( pApplicationInfo_ )
      , enabledLayerCount( enabledLayerCount_ )
      , ppEnabledLayerNames( ppEnabledLayerNames_ )
      , enabledExtensionCount( enabledExtensionCount_ )
      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
    {}

    VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo(
      VULKAN_HPP_NAMESPACE::InstanceCreateFlags                                 flags_,
      const VULKAN_HPP_NAMESPACE::ApplicationInfo *                             pApplicationInfo_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ = {} )
      : flags( flags_ )
      , pApplicationInfo( pApplicationInfo_ )
      , enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) )
      , ppEnabledLayerNames( pEnabledLayerNames_.data() )
      , enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) )
      , ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo &
                            operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
      return *this;
    }

    InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    InstanceCreateInfo &
      setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pApplicationInfo = pApplicationInfo_;
      return *this;
    }

    InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount = enabledLayerCount_;
      return *this;
    }

    InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledLayerNames = ppEnabledLayerNames_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo & setPEnabledLayerNames(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledLayerNames_ )
      VULKAN_HPP_NOEXCEPT
    {
      enabledLayerCount   = static_cast<uint32_t>( pEnabledLayerNames_.size() );
      ppEnabledLayerNames = pEnabledLayerNames_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount = enabledExtensionCount_;
      return *this;
    }

    InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
    {
      ppEnabledExtensionNames = ppEnabledExtensionNames_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    InstanceCreateInfo & setPEnabledExtensionNames(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char * const> const & pEnabledExtensionNames_ )
      VULKAN_HPP_NOEXCEPT
    {
      enabledExtensionCount   = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
      ppEnabledExtensionNames = pEnabledExtensionNames_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkInstanceCreateInfo *>( this );
    }

    operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkInstanceCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( InstanceCreateInfo const & ) const = default;
#else
    bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( pApplicationInfo == rhs.pApplicationInfo ) && ( enabledLayerCount == rhs.enabledLayerCount ) &&
             ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) &&
             ( enabledExtensionCount == rhs.enabledExtensionCount ) &&
             ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
    }

    bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType           sType                   = StructureType::eInstanceCreateInfo;
    const void *                                  pNext                   = {};
    VULKAN_HPP_NAMESPACE::InstanceCreateFlags     flags                   = {};
    const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo        = {};
    uint32_t                                      enabledLayerCount       = {};
    const char * const *                          ppEnabledLayerNames     = {};
    uint32_t                                      enabledExtensionCount   = {};
    const char * const *                          ppEnabledExtensionNames = {};
  };
  static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
  {
    using Type = InstanceCreateInfo;
  };
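
  // Usage sketch (illustrative only): with the enhanced-mode ArrayProxy constructor above, an Instance can be
  // created without spelling out the count/pointer pairs. Assuming the default vk namespace, exception-based
  // error handling, and the listed layer/extension names being available on the system:
  //
  //   vk::ApplicationInfo appInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_2 );
  //   std::vector<const char *> layers     = { "VK_LAYER_KHRONOS_validation" };
  //   std::vector<const char *> extensions = { VK_KHR_SURFACE_EXTENSION_NAME };
  //   vk::Instance instance = vk::createInstance( vk::InstanceCreateInfo( {}, &appInfo, layers, extensions ) );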

#if defined( VK_USE_PLATFORM_MACOS_MVK )
  struct MacOSSurfaceCreateInfoMVK
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {},
                                                    const void * pView_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pView( pView_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
      : MacOSSurfaceCreateInfoMVK( *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK &
                            operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>( &rhs );
      return *this;
    }

    MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT
    {
      pView = pView_;
      return *this;
    }

    operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( this );
    }

    operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default;
#  else
    bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView );
    }

    bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType = StructureType::eMacosSurfaceCreateInfoMVK;
    const void *                                     pNext = {};
    VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
    const void *                                     pView = {};
  };
  static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MacOSSurfaceCreateInfoMVK>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
  {
    using Type = MacOSSurfaceCreateInfoMVK;
  };
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
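
  // Usage sketch (illustrative only, requires VK_USE_PLATFORM_MACOS_MVK): pView points at a CAMetalLayer or at
  // an NSView backed by one; the surface itself is created from the instance. Assuming "nsViewPointer" exists:
  //
  //   vk::MacOSSurfaceCreateInfoMVK surfaceInfo( {}, nsViewPointer );
  //   vk::SurfaceKHR surface = instance.createMacOSSurfaceMVK( surfaceInfo );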

  struct MemoryAllocateFlagsInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {},
                                                  uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , deviceMask( deviceMask_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo &
                            operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
      return *this;
    }

    MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMask = deviceMask_;
      return *this;
    }

    operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo *>( this );
    }

    operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryAllocateFlagsInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default;
#else
    bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( deviceMask == rhs.deviceMask );
    }

    bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType      = StructureType::eMemoryAllocateFlagsInfo;
    const void *                              pNext      = {};
    VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags      = {};
    uint32_t                                  deviceMask = {};
  };
  static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
  {
    using Type = MemoryAllocateFlagsInfo;
  };
  using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
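
  // Usage sketch (illustrative only): MemoryAllocateFlagsInfo is chained into a MemoryAllocateInfo, e.g. to
  // request that buffers bound to the allocation can be used with getBufferDeviceAddress:
  //
  //   vk::MemoryAllocateFlagsInfo flagsInfo( vk::MemoryAllocateFlagBits::eDeviceAddress );
  //   vk::MemoryAllocateInfo      allocInfo( allocationSize, memoryTypeIndex );
  //   allocInfo.pNext             = &flagsInfo;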

  struct MemoryDedicatedAllocateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image  image_  = {},
                                                      VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
      : image( image_ )
      , buffer( buffer_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo &
                            operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
      return *this;
    }

    MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
    {
      image = image_;
      return *this;
    }

    MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
    {
      buffer = buffer_;
      return *this;
    }

    operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo *>( this );
    }

    operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryDedicatedAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer );
    }

    bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::eMemoryDedicatedAllocateInfo;
    const void *                        pNext  = {};
    VULKAN_HPP_NAMESPACE::Image         image  = {};
    VULKAN_HPP_NAMESPACE::Buffer        buffer = {};
  };
  static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryDedicatedAllocateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
  {
    using Type = MemoryDedicatedAllocateInfo;
  };
  using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
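
  // Usage sketch (illustrative only): MemoryDedicatedAllocateInfo ties an allocation to exactly one image or one
  // buffer (the other handle stays null). A StructureChain keeps the pNext wiring type-safe; "allocationSize"
  // and "memoryTypeIndex" are assumed to come from a prior requirements query:
  //
  //   vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryDedicatedAllocateInfo> chain(
  //     { allocationSize, memoryTypeIndex }, { image } );
  //   vk::DeviceMemory memory = device.allocateMemory( chain.get<vk::MemoryAllocateInfo>() );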

  struct MemoryDedicatedRequirements
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_  = {},
                                   VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
      : prefersDedicatedAllocation( prefersDedicatedAllocation_ )
      , requiresDedicatedAllocation( requiresDedicatedAllocation_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedRequirements &
                            operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
      return *this;
    }

    operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryDedicatedRequirements *>( this );
    }

    operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryDedicatedRequirements *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryDedicatedRequirements const & ) const = default;
#else
    bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) &&
             ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
    }

    bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                       = StructureType::eMemoryDedicatedRequirements;
    void *                              pNext                       = {};
    VULKAN_HPP_NAMESPACE::Bool32        prefersDedicatedAllocation  = {};
    VULKAN_HPP_NAMESPACE::Bool32        requiresDedicatedAllocation = {};
  };
  static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
  {
    using Type = MemoryDedicatedRequirements;
  };
  using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
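
  // Usage sketch (illustrative only): MemoryDedicatedRequirements is an output structure; chain it behind
  // MemoryRequirements2 when querying an image or buffer to learn whether a dedicated allocation is preferred
  // or required. With the enhanced-mode StructureChain query, assuming "device" and "image":
  //
  //   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
  //     vk::ImageMemoryRequirementsInfo2( image ) );
  //   bool useDedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation != 0;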

  struct MemoryOpaqueCaptureAddressAllocateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {} ) VULKAN_HPP_NOEXCEPT
      : opaqueCaptureAddress( opaqueCaptureAddress_ )
    {}

    VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryOpaqueCaptureAddressAllocateInfo(
          *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo &
                            operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryOpaqueCaptureAddressAllocateInfo &
      operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
      return *this;
    }

    MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryOpaqueCaptureAddressAllocateInfo &
      setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      opaqueCaptureAddress = opaqueCaptureAddress_;
      return *this;
    }

    operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
    }

    operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default;
#else
    bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
    }

    bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
    const void *                        pNext                = {};
    uint64_t                            opaqueCaptureAddress = {};
  };
  static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
  {
    using Type = MemoryOpaqueCaptureAddressAllocateInfo;
  };
  using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
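
  // Usage sketch (illustrative only): during capture, Device::getDeviceMemoryOpaqueCaptureAddress returns the
  // opaque address of an allocation made with MemoryAllocateFlagBits::eDeviceAddressCaptureReplay; at replay
  // time that recorded value is fed back through this structure. "recordedAddress" and "allocInfo" are assumed:
  //
  //   vk::MemoryOpaqueCaptureAddressAllocateInfo captureInfo( recordedAddress );
  //   allocInfo.pNext = &captureInfo;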

  struct MemoryPriorityAllocateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT
      : priority( priority_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MemoryPriorityAllocateInfoEXT( *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT &
                            operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>( &rhs );
      return *this;
    }

    MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
    {
      priority = priority_;
      return *this;
    }

    operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT *>( this );
    }

    operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default;
#else
    bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority );
    }

    bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eMemoryPriorityAllocateInfoEXT;
    const void *                        pNext    = {};
    float                               priority = {};
  };
  static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
  {
    using Type = MemoryPriorityAllocateInfoEXT;
  };
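
  // Usage sketch (illustrative only): MemoryPriorityAllocateInfoEXT (VK_EXT_memory_priority) attaches a priority
  // in [0.0, 1.0] to an allocation; higher values make the implementation prefer keeping the memory resident
  // under pressure:
  //
  //   vk::MemoryPriorityAllocateInfoEXT priorityInfo( 1.0f );
  //   allocInfo.pNext = &priorityInfo;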

#if defined( VK_USE_PLATFORM_METAL_EXT )
  struct MetalSurfaceCreateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {},
                                                    const CAMetalLayer * pLayer_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pLayer( pLayer_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : MetalSurfaceCreateInfoEXT( *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT &
                            operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>( &rhs );
      return *this;
    }

    MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      pLayer = pLayer_;
      return *this;
    }

    operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( this );
    }

    operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default;
#  else
    bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer );
    }

    bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType  = StructureType::eMetalSurfaceCreateInfoEXT;
    const void *                                     pNext  = {};
    VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags  = {};
    const CAMetalLayer *                             pLayer = {};
  };
  static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MetalSurfaceCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
  {
    using Type = MetalSurfaceCreateInfoEXT;
  };
#endif /*VK_USE_PLATFORM_METAL_EXT*/
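
  // Usage sketch (illustrative; not part of the generated registry output): when
  // VK_USE_PLATFORM_METAL_EXT is defined and the VK_EXT_metal_surface instance extension is
  // enabled, a surface can be created from an existing CAMetalLayer. The `instance` handle and
  // `metalLayer` pointer below are assumptions.
  //
  //   vk::MetalSurfaceCreateInfoEXT surfaceCreateInfo( {}, metalLayer );
  //   vk::SurfaceKHR                surface = instance.createMetalSurfaceEXT( surfaceCreateInfo );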

  struct MutableDescriptorTypeListVALVE
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE(
      uint32_t                                     descriptorTypeCount_ = {},
      const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_    = {} ) VULKAN_HPP_NOEXCEPT
      : descriptorTypeCount( descriptorTypeCount_ )
      , pDescriptorTypes( pDescriptorTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR
      MutableDescriptorTypeListVALVE( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MutableDescriptorTypeListVALVE( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : MutableDescriptorTypeListVALVE( *reinterpret_cast<MutableDescriptorTypeListVALVE const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    MutableDescriptorTypeListVALVE(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const &
        descriptorTypes_ )
      : descriptorTypeCount( static_cast<uint32_t>( descriptorTypes_.size() ) )
      , pDescriptorTypes( descriptorTypes_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE &
                            operator=( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MutableDescriptorTypeListVALVE & operator=( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE const *>( &rhs );
      return *this;
    }

    MutableDescriptorTypeListVALVE & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorTypeCount = descriptorTypeCount_;
      return *this;
    }

    MutableDescriptorTypeListVALVE &
      setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      pDescriptorTypes = pDescriptorTypes_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    MutableDescriptorTypeListVALVE & setDescriptorTypes(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const &
        descriptorTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorTypeCount = static_cast<uint32_t>( descriptorTypes_.size() );
      pDescriptorTypes    = descriptorTypes_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkMutableDescriptorTypeListVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMutableDescriptorTypeListVALVE *>( this );
    }

    operator VkMutableDescriptorTypeListVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMutableDescriptorTypeListVALVE *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MutableDescriptorTypeListVALVE const & ) const = default;
#else
    bool operator==( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes );
    }

    bool operator!=( MutableDescriptorTypeListVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                     descriptorTypeCount = {};
    const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes    = {};
  };
  static_assert( sizeof( MutableDescriptorTypeListVALVE ) == sizeof( VkMutableDescriptorTypeListVALVE ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MutableDescriptorTypeListVALVE>::value,
                 "struct wrapper is not a standard layout!" );

  struct MutableDescriptorTypeCreateInfoVALVE
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eMutableDescriptorTypeCreateInfoVALVE;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE(
      uint32_t                                                     mutableDescriptorTypeListCount_ = {},
      const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_    = {} )
      VULKAN_HPP_NOEXCEPT
      : mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ )
      , pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ )
    {}

    VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE( MutableDescriptorTypeCreateInfoVALVE const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    MutableDescriptorTypeCreateInfoVALVE( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : MutableDescriptorTypeCreateInfoVALVE( *reinterpret_cast<MutableDescriptorTypeCreateInfoVALVE const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    MutableDescriptorTypeCreateInfoVALVE(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const &
        mutableDescriptorTypeLists_ )
      : mutableDescriptorTypeListCount( static_cast<uint32_t>( mutableDescriptorTypeLists_.size() ) )
      , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE &
                            operator=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    MutableDescriptorTypeCreateInfoVALVE &
      operator=( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE const *>( &rhs );
      return *this;
    }

    MutableDescriptorTypeCreateInfoVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    MutableDescriptorTypeCreateInfoVALVE &
      setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_;
      return *this;
    }

    MutableDescriptorTypeCreateInfoVALVE & setPMutableDescriptorTypeLists(
      const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
    {
      pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeLists(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const &
        mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableDescriptorTypeListCount = static_cast<uint32_t>( mutableDescriptorTypeLists_.size() );
      pMutableDescriptorTypeLists    = mutableDescriptorTypeLists_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkMutableDescriptorTypeCreateInfoVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkMutableDescriptorTypeCreateInfoVALVE *>( this );
    }

    operator VkMutableDescriptorTypeCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkMutableDescriptorTypeCreateInfoVALVE *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( MutableDescriptorTypeCreateInfoVALVE const & ) const = default;
#else
    bool operator==( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) &&
             ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists );
    }

    bool operator!=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoVALVE;
    const void *                        pNext = {};
    uint32_t                            mutableDescriptorTypeListCount                       = {};
    const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE * pMutableDescriptorTypeLists = {};
  };
  static_assert( sizeof( MutableDescriptorTypeCreateInfoVALVE ) == sizeof( VkMutableDescriptorTypeCreateInfoVALVE ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<MutableDescriptorTypeCreateInfoVALVE>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eMutableDescriptorTypeCreateInfoVALVE>
  {
    using Type = MutableDescriptorTypeCreateInfoVALVE;
  };
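
  // Usage sketch (illustrative; not part of the generated registry output): in enhanced mode the
  // ArrayProxy constructors let the per-binding descriptor-type lists be built directly from
  // containers and then chained into a descriptor set layout create info. The
  // `descriptorSetLayoutCreateInfo` object below is assumed to exist in the caller's code.
  //
  //   std::array<vk::DescriptorType, 2> types = { vk::DescriptorType::eSampledImage,
  //                                               vk::DescriptorType::eStorageImage };
  //   vk::MutableDescriptorTypeListVALVE       list( types );
  //   vk::MutableDescriptorTypeCreateInfoVALVE mutableInfo( list );
  //   descriptorSetLayoutCreateInfo.setPNext( &mutableInfo );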

  union PerformanceCounterResultKHR
  {
    PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
    }

    PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {}

    PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {}

    PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {}

    PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {}

    PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {}

    PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {}

    PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
    {
      int32 = int32_;
      return *this;
    }

    PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
    {
      int64 = int64_;
      return *this;
    }

    PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
    {
      uint32 = uint32_;
      return *this;
    }

    PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
    {
      uint64 = uint64_;
      return *this;
    }

    PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
    {
      float32 = float32_;
      return *this;
    }

    PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
    {
      float64 = float64_;
      return *this;
    }

    VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR &
      operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      memcpy( static_cast<void *>( this ), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
      return *this;
    }

    operator VkPerformanceCounterResultKHR const &() const
    {
      return *reinterpret_cast<const VkPerformanceCounterResultKHR *>( this );
    }

    operator VkPerformanceCounterResultKHR &()
    {
      return *reinterpret_cast<VkPerformanceCounterResultKHR *>( this );
    }

    int32_t  int32;
    int64_t  int64;
    uint32_t uint32;
    uint64_t uint64;
    float    float32;
    double   float64;
  };
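
  // Usage sketch (illustrative; not part of the generated registry output): each
  // PerformanceCounterResultKHR read back from a performance query must be interpreted through
  // the member that matches the corresponding PerformanceCounterKHR::storage value. The
  // `counters` and `results` containers below are assumptions.
  //
  //   for ( size_t i = 0; i < results.size(); ++i )
  //   {
  //     if ( counters[i].storage == vk::PerformanceCounterStorageKHR::eFloat64 )
  //     {
  //       double value = results[i].float64;  // active member selected by the counter's storage
  //     }
  //   }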

  struct PerformanceQuerySubmitInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : counterPassIndex( counterPassIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR &
                            operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
      return *this;
    }

    PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      counterPassIndex = counterPassIndex_;
      return *this;
    }

    operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR *>( this );
    }

    operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default;
#else
    bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex );
    }

    bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePerformanceQuerySubmitInfoKHR;
    const void *                        pNext            = {};
    uint32_t                            counterPassIndex = {};
  };
  static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PerformanceQuerySubmitInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
  {
    using Type = PerformanceQuerySubmitInfoKHR;
  };
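
  // Usage sketch (illustrative; not part of the generated registry output): when submitting work
  // that records performance queries, a PerformanceQuerySubmitInfoKHR selects which counter pass
  // the submission executes. The `submitInfo`, `queue`, `fence` and `pass` names below are
  // assumptions; the pass index must stay below the pass count reported for the query.
  //
  //   vk::PerformanceQuerySubmitInfoKHR perfSubmitInfo;
  //   perfSubmitInfo.setCounterPassIndex( pass );
  //   submitInfo.setPNext( &perfSubmitInfo );
  //   queue.submit( submitInfo, fence );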

  struct PhysicalDevice16BitStorageFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevice16BitStorageFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_           = {},
                                          VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
                                          VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_              = {},
                                          VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT
      : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
      , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
      , storagePushConstant16( storagePushConstant16_ )
      , storageInputOutput16( storageInputOutput16_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures &
                            operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice16BitStorageFeatures &
      operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevice16BitStorageFeatures &
      setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer16BitAccess = storageBuffer16BitAccess_;
      return *this;
    }

    PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess(
      VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
      return *this;
    }

    PhysicalDevice16BitStorageFeatures &
      setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant16 = storagePushConstant16_;
      return *this;
    }

    PhysicalDevice16BitStorageFeatures &
      setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
    {
      storageInputOutput16 = storageInputOutput16_;
      return *this;
    }

    operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures *>( this );
    }

    operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default;
#else
    bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
             ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) &&
             ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
             ( storageInputOutput16 == rhs.storageInputOutput16 );
    }

    bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                    = StructureType::ePhysicalDevice16BitStorageFeatures;
    void *                              pNext                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        storageBuffer16BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32        uniformAndStorageBuffer16BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32        storagePushConstant16              = {};
    VULKAN_HPP_NAMESPACE::Bool32        storageInputOutput16               = {};
  };
  static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
  {
    using Type = PhysicalDevice16BitStorageFeatures;
  };
  using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
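
  // Usage sketch (illustrative; not part of the generated registry output): feature structs such
  // as PhysicalDevice16BitStorageFeatures are usually queried through a StructureChain, which
  // links them into PhysicalDeviceFeatures2 automatically. `physicalDevice` is assumed to be a
  // valid handle.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDevice16BitStorageFeatures>();
  //   bool has16BitSSBO =
  //     chain.get<vk::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess == VK_TRUE;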

  struct PhysicalDevice4444FormatsFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevice4444FormatsFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {},
                                            VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {} ) VULKAN_HPP_NOEXCEPT
      : formatA4R4G4B4( formatA4R4G4B4_ )
      , formatA4B4G4R4( formatA4B4G4R4_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT &
                            operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice4444FormatsFeaturesEXT &
      operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevice4444FormatsFeaturesEXT &
      setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
    {
      formatA4R4G4B4 = formatA4R4G4B4_;
      return *this;
    }

    PhysicalDevice4444FormatsFeaturesEXT &
      setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
    {
      formatA4B4G4R4 = formatA4B4G4R4_;
      return *this;
    }

    operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) &&
             ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
    }

    bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
    void *                              pNext          = {};
    VULKAN_HPP_NAMESPACE::Bool32        formatA4R4G4B4 = {};
    VULKAN_HPP_NAMESPACE::Bool32        formatA4B4G4R4 = {};
  };
  static_assert( sizeof( PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevice4444FormatsFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
  {
    using Type = PhysicalDevice4444FormatsFeaturesEXT;
  };

  struct PhysicalDevice8BitStorageFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevice8BitStorageFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_           = {},
                                         VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
                                         VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT
      : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
      , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
      , storagePushConstant8( storagePushConstant8_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures &
                            operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevice8BitStorageFeatures &
      setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer8BitAccess = storageBuffer8BitAccess_;
      return *this;
    }

    PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess(
      VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
      return *this;
    }

    PhysicalDevice8BitStorageFeatures &
      setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant8 = storagePushConstant8_;
      return *this;
    }

    operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures *>( this );
    }

    operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default;
#else
    bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
             ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) &&
             ( storagePushConstant8 == rhs.storagePushConstant8 );
    }

    bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                   = StructureType::ePhysicalDevice8BitStorageFeatures;
    void *                              pNext                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        storageBuffer8BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32        uniformAndStorageBuffer8BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32        storagePushConstant8              = {};
  };
  static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevice8BitStorageFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
  {
    using Type = PhysicalDevice8BitStorageFeatures;
  };
  using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;

  struct PhysicalDeviceASTCDecodeFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT
      : decodeModeSharedExponent( decodeModeSharedExponent_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT &
                            operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceASTCDecodeFeaturesEXT &
      operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceASTCDecodeFeaturesEXT &
      setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
    {
      decodeModeSharedExponent = decodeModeSharedExponent_;
      return *this;
    }

    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
    }

    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                    = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
    void *                              pNext                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        decodeModeSharedExponent = {};
  };
  static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceASTCDecodeFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
  {
    using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
  };

  struct PhysicalDeviceAccelerationStructureFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_                                 = {},
      VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_                    = {},
      VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_                    = {},
      VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_                     = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT
      : accelerationStructure( accelerationStructure_ )
      , accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ )
      , accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ )
      , accelerationStructureHostCommands( accelerationStructureHostCommands_ )
      , descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR(
      PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAccelerationStructureFeaturesKHR(
          *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR &
                            operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceAccelerationStructureFeaturesKHR &
      operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceAccelerationStructureFeaturesKHR &
      setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructure = accelerationStructure_;
      return *this;
    }

    PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay(
      VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_;
      return *this;
    }

    PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild(
      VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_;
      return *this;
    }

    PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands(
      VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureHostCommands = accelerationStructureHostCommands_;
      return *this;
    }

    PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_;
      return *this;
    }

    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( accelerationStructure == rhs.accelerationStructure ) &&
             ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) &&
             ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) &&
             ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) &&
             ( descriptorBindingAccelerationStructureUpdateAfterBind ==
               rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
    }

    bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        accelerationStructure                                 = {};
    VULKAN_HPP_NAMESPACE::Bool32        accelerationStructureCaptureReplay                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        accelerationStructureIndirectBuild                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        accelerationStructureHostCommands                     = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingAccelerationStructureUpdateAfterBind = {};
  };
  static_assert( sizeof( PhysicalDeviceAccelerationStructureFeaturesKHR ) ==
                   sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceAccelerationStructureFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR>
  {
    using Type = PhysicalDeviceAccelerationStructureFeaturesKHR;
  };
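
  // Usage sketch (illustrative; not part of the generated registry output): acceleration-structure
  // features are requested by chaining this struct into DeviceCreateInfo::pNext at device
  // creation. The `queueCreateInfos` and `deviceExtensions` containers below are assumptions.
  //
  //   vk::PhysicalDeviceAccelerationStructureFeaturesKHR asFeatures;
  //   asFeatures.setAccelerationStructure( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo( {}, queueCreateInfos, {}, deviceExtensions );
  //   deviceCreateInfo.setPNext( &asFeatures );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );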

  struct PhysicalDeviceAccelerationStructurePropertiesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR(
      uint64_t maxGeometryCount_                                           = {},
      uint64_t maxInstanceCount_                                           = {},
      uint64_t maxPrimitiveCount_                                          = {},
      uint32_t maxPerStageDescriptorAccelerationStructures_                = {},
      uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {},
      uint32_t maxDescriptorSetAccelerationStructures_                     = {},
      uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_      = {},
      uint32_t minAccelerationStructureScratchOffsetAlignment_             = {} ) VULKAN_HPP_NOEXCEPT
      : maxGeometryCount( maxGeometryCount_ )
      , maxInstanceCount( maxInstanceCount_ )
      , maxPrimitiveCount( maxPrimitiveCount_ )
      , maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ )
      , maxPerStageDescriptorUpdateAfterBindAccelerationStructures(
          maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ )
      , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
      , maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ )
      , minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR(
      PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceAccelerationStructurePropertiesKHR(
          *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructurePropertiesKHR &
                            operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceAccelerationStructurePropertiesKHR &
      operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) &&
             ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) &&
             ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) &&
             ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures ==
               rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) &&
             ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) &&
             ( maxDescriptorSetUpdateAfterBindAccelerationStructures ==
               rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) &&
             ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment );
    }

    bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
    void *                              pNext = {};
    uint64_t                            maxGeometryCount                                           = {};
    uint64_t                            maxInstanceCount                                           = {};
    uint64_t                            maxPrimitiveCount                                          = {};
    uint32_t                            maxPerStageDescriptorAccelerationStructures                = {};
    uint32_t                            maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {};
    uint32_t                            maxDescriptorSetAccelerationStructures                     = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindAccelerationStructures      = {};
    uint32_t                            minAccelerationStructureScratchOffsetAlignment             = {};
  };
  static_assert( sizeof( PhysicalDeviceAccelerationStructurePropertiesKHR ) ==
                   sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceAccelerationStructurePropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR>
  {
    using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
  };
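
  // Usage sketch (illustrative; not part of the generated registry output): implementation limits
  // such as the scratch-offset alignment are read back through a getProperties2 structure chain.
  // `physicalDevice` is assumed to be a valid handle.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceAccelerationStructurePropertiesKHR>();
  //   uint32_t scratchAlignment = chain.get<vk::PhysicalDeviceAccelerationStructurePropertiesKHR>()
  //                                 .minAccelerationStructureScratchOffsetAlignment;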

  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT
      : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(
      PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBlendOperationAdvancedFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT &
                            operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT &
      operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations(
      VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
    {
      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
      return *this;
    }

    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
    }

    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        advancedBlendCoherentOperations = {};
  };
  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
  {
    using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
  };

  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(
      uint32_t                     advancedBlendMaxColorAttachments_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_         = {},
      VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_        = {},
      VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_            = {} ) VULKAN_HPP_NOEXCEPT
      : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
      , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
      , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
      , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
      , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
      , advancedBlendAllOperations( advancedBlendAllOperations_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(
      PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBlendOperationAdvancedPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedPropertiesEXT &
                            operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBlendOperationAdvancedPropertiesEXT &
      operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) &&
             ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) &&
             ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) &&
             ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) &&
             ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) &&
             ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
    }

    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
    void *                              pNext = {};
    uint32_t                            advancedBlendMaxColorAttachments      = {};
    VULKAN_HPP_NAMESPACE::Bool32        advancedBlendIndependentBlend         = {};
    VULKAN_HPP_NAMESPACE::Bool32        advancedBlendNonPremultipliedSrcColor = {};
    VULKAN_HPP_NAMESPACE::Bool32        advancedBlendNonPremultipliedDstColor = {};
    VULKAN_HPP_NAMESPACE::Bool32        advancedBlendCorrelatedOverlap        = {};
    VULKAN_HPP_NAMESPACE::Bool32        advancedBlendAllOperations            = {};
  };
  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
  {
    using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
  };

  struct PhysicalDeviceBufferDeviceAddressFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_              = {},
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_   = {} ) VULKAN_HPP_NOEXCEPT
      : bufferDeviceAddress( bufferDeviceAddress_ )
      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(
      PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBufferDeviceAddressFeatures(
          *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures &
                            operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBufferDeviceAddressFeatures &
      operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeatures &
      setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay(
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice(
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
    }

    bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
    void *                              pNext               = {};
    VULKAN_HPP_NAMESPACE::Bool32        bufferDeviceAddress = {};
    VULKAN_HPP_NAMESPACE::Bool32        bufferDeviceAddressCaptureReplay = {};
    VULKAN_HPP_NAMESPACE::Bool32        bufferDeviceAddressMultiDevice   = {};
  };
  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) ==
                   sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
  {
    using Type = PhysicalDeviceBufferDeviceAddressFeatures;
  };
  using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
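
  // Illustrative usage sketch (not part of the generated API): enabling the core buffer-device-address
  // feature at device creation by attaching the struct to DeviceCreateInfo::pNext. `physicalDevice` and
  // `queueCreateInfo` are assumed to exist, and the device is assumed to support Vulkan 1.2; with the
  // default configuration (enhanced mode, exceptions enabled) createDevice returns the handle directly.
  //
  //   auto bdaFeatures = vk::PhysicalDeviceBufferDeviceAddressFeatures().setBufferDeviceAddress( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfoCount( 1 )
  //                   .setPQueueCreateInfos( &queueCreateInfo )
  //                   .setPNext( &bdaFeatures );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );
  //
  // The PhysicalDeviceBufferDeviceAddressFeaturesKHR alias above names the same type, so code written
  // against VK_KHR_buffer_device_address keeps compiling after the promotion to core.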

  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_              = {},
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_   = {} ) VULKAN_HPP_NOEXCEPT
      : bufferDeviceAddress( bufferDeviceAddress_ )
      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(
      PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceBufferDeviceAddressFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT &
                            operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceBufferDeviceAddressFeaturesEXT &
      operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT &
      setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay(
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice(
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
    }

    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        bufferDeviceAddress              = {};
    VULKAN_HPP_NAMESPACE::Bool32        bufferDeviceAddressCaptureReplay = {};
    VULKAN_HPP_NAMESPACE::Bool32        bufferDeviceAddressMultiDevice   = {};
  };
  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
  {
    using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
  };
  using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
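
  // Illustrative usage sketch (not part of the generated API): instead of wiring pNext by hand, the
  // StructureChain overload of getFeatures2 can pull in the EXT variant. `physicalDevice` is assumed to
  // be valid, VK_EXT_buffer_device_address advertised, and enhanced mode enabled.
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceBufferDeviceAddressFeaturesEXT>();
  //   bool captureReplay =
  //     chain.get<vk::PhysicalDeviceBufferDeviceAddressFeaturesEXT>().bufferDeviceAddressCaptureReplay;
  //
  // The PhysicalDeviceBufferAddressFeaturesEXT alias above only preserves the extension's original
  // struct name; it is the same type as PhysicalDeviceBufferDeviceAddressFeaturesEXT.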

  struct PhysicalDeviceCoherentMemoryFeaturesAMD
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(
      VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceCoherentMemory( deviceCoherentMemory_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCoherentMemoryFeaturesAMD(
          *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD &
                            operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCoherentMemoryFeaturesAMD &
      operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
      return *this;
    }

    PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceCoherentMemoryFeaturesAMD &
      setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceCoherentMemory = deviceCoherentMemory_;
      return *this;
    }

    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
    }

    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default;
#else
    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
    }

    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
    void *                              pNext                = {};
    VULKAN_HPP_NAMESPACE::Bool32        deviceCoherentMemory = {};
  };
  static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) ==
                   sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
  {
    using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
  };

  struct PhysicalDeviceColorWriteEnableFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {} ) VULKAN_HPP_NOEXCEPT : colorWriteEnable( colorWriteEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT(
      PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceColorWriteEnableFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT &
                            operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceColorWriteEnableFeaturesEXT &
      operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceColorWriteEnableFeaturesEXT &
      setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      colorWriteEnable = colorWriteEnable_;
      return *this;
    }

    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceColorWriteEnableFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable );
    }

    bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT;
    void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::Bool32        colorWriteEnable = {};
  };
  static_assert( sizeof( PhysicalDeviceColorWriteEnableFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceColorWriteEnableFeaturesEXT;
  };
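
  // Illustrative usage sketch (not part of the generated API): the conversion operators defined above let
  // the wrapper be handed to plain C Vulkan code without a manual copy of each member. The target
  // variable below is only an example; any API expecting the C struct works the same way.
  //
  //   vk::PhysicalDeviceColorWriteEnableFeaturesEXT colorWriteFeatures( VK_TRUE );
  //   VkPhysicalDeviceColorWriteEnableFeaturesEXT   cFeatures = colorWriteFeatures;  // implicit conversion
  //   // ... cFeatures (or &colorWriteFeatures, since the layouts are asserted to match) can now be
  //   // placed in a VkDeviceCreateInfo::pNext chain built by C code.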

  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_  = {},
      VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT
      : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
      , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(
      PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceComputeShaderDerivativesFeaturesNV(
          *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV &
                            operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceComputeShaderDerivativesFeaturesNV &
      operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceComputeShaderDerivativesFeaturesNV &
      setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
    {
      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
      return *this;
    }

    PhysicalDeviceComputeShaderDerivativesFeaturesNV &
      setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
    {
      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
      return *this;
    }

    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) &&
             ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
    }

    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        computeDerivativeGroupQuads  = {};
    VULKAN_HPP_NAMESPACE::Bool32        computeDerivativeGroupLinear = {};
  };
  static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV>
  {
    using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV;
  };

  struct PhysicalDeviceConditionalRenderingFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT
      : conditionalRendering( conditionalRendering_ )
      , inheritedConditionalRendering( inheritedConditionalRendering_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(
      PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceConditionalRenderingFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT &
                            operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceConditionalRenderingFeaturesEXT &
      operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceConditionalRenderingFeaturesEXT &
      setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      conditionalRendering = conditionalRendering_;
      return *this;
    }

    PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering(
      VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
    {
      inheritedConditionalRendering = inheritedConditionalRendering_;
      return *this;
    }

    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) &&
             ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
    }

    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        conditionalRendering          = {};
    VULKAN_HPP_NAMESPACE::Bool32        inheritedConditionalRendering = {};
  };
  static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceConditionalRenderingFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
  {
    using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
  };
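
  // Illustrative usage sketch (not part of the generated API): when VULKAN_HPP_NO_STRUCT_CONSTRUCTORS is
  // not defined, the members can also be filled directly through the constructor instead of the set*
  // methods, which keeps simple feature structs to a single expression.
  //
  //   vk::PhysicalDeviceConditionalRenderingFeaturesEXT conditionalRenderingFeatures(
  //     /* conditionalRendering_          = */ VK_TRUE,
  //     /* inheritedConditionalRendering_ = */ VK_FALSE );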

  struct PhysicalDeviceConservativeRasterizationPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(
      float                        primitiveOverestimationSize_                 = {},
      float                        maxExtraPrimitiveOverestimationSize_         = {},
      float                        extraPrimitiveOverestimationSizeGranularity_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_                    = {},
      VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_               = {},
      VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_     = {},
      VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_  = {} ) VULKAN_HPP_NOEXCEPT
      : primitiveOverestimationSize( primitiveOverestimationSize_ )
      , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
      , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
      , primitiveUnderestimation( primitiveUnderestimation_ )
      , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
      , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
      , degenerateLinesRasterized( degenerateLinesRasterized_ )
      , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
      , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(
      PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceConservativeRasterizationPropertiesEXT(
      VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceConservativeRasterizationPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConservativeRasterizationPropertiesEXT &
                            operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceConservativeRasterizationPropertiesEXT &
      operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) &&
             ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) &&
             ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) &&
             ( primitiveUnderestimation == rhs.primitiveUnderestimation ) &&
             ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) &&
             ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) &&
             ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) &&
             ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) &&
             ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
    }

    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
    void *                              pNext = {};
    float                               primitiveOverestimationSize                 = {};
    float                               maxExtraPrimitiveOverestimationSize         = {};
    float                               extraPrimitiveOverestimationSizeGranularity = {};
    VULKAN_HPP_NAMESPACE::Bool32        primitiveUnderestimation                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        conservativePointAndLineRasterization       = {};
    VULKAN_HPP_NAMESPACE::Bool32        degenerateTrianglesRasterized               = {};
    VULKAN_HPP_NAMESPACE::Bool32        degenerateLinesRasterized                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        fullyCoveredFragmentShaderInputVariable     = {};
    VULKAN_HPP_NAMESPACE::Bool32        conservativeRasterizationPostDepthCoverage  = {};
  };
  static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceConservativeRasterizationPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
  {
    using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
  };
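
  // Illustrative usage sketch (not part of the generated API): reading the conservative rasterization
  // limits after a properties2 query, then clamping an application request against them. `physicalDevice`
  // is assumed to be valid and VK_EXT_conservative_rasterization supported.
  //
  //   vk::PhysicalDeviceConservativeRasterizationPropertiesEXT conservativeProps;
  //   vk::PhysicalDeviceProperties2                            props2;
  //   props2.pNext = &conservativeProps;
  //   physicalDevice.getProperties2( &props2 );
  //   float extraOverestimation =
  //     std::min( 0.75f, conservativeProps.maxExtraPrimitiveOverestimationSize );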

  struct PhysicalDeviceCooperativeMatrixFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT
      : cooperativeMatrix( cooperativeMatrix_ )
      , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(
      PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrixFeaturesNV(
          *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV &
                            operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixFeaturesNV &
      operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceCooperativeMatrixFeaturesNV &
      setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrix = cooperativeMatrix_;
      return *this;
    }

    PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess(
      VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
      return *this;
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) &&
             ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
    void *                              pNext             = {};
    VULKAN_HPP_NAMESPACE::Bool32        cooperativeMatrix = {};
    VULKAN_HPP_NAMESPACE::Bool32        cooperativeMatrixRobustBufferAccess = {};
  };
  static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrixFeaturesNV;
  };

  struct PhysicalDeviceCooperativeMatrixPropertiesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(
      VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT
      : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(
      PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCooperativeMatrixPropertiesNV(
          *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixPropertiesNV &
                            operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCooperativeMatrixPropertiesNV &
      operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
    }

    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
    void *                                 pNext = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
  };
  static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) ==
                   sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixPropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
  {
    using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
  };
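
  // Illustrative usage sketch (not part of the generated API): cooperativeMatrixSupportedStages is a
  // ShaderStageFlags bitmask, so individual stages are tested with operator&. `coopMatrixProps` is
  // assumed to have been filled through a getProperties2 pNext chain as in the sketches above.
  //
  //   if ( coopMatrixProps.cooperativeMatrixSupportedStages & vk::ShaderStageFlagBits::eCompute )
  //   {
  //     // cooperative matrices are usable from compute shaders on this implementation
  //   }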

  struct PhysicalDeviceCornerSampledImageFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT
      : cornerSampledImage( cornerSampledImage_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(
      PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCornerSampledImageFeaturesNV(
          *reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV &
                            operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCornerSampledImageFeaturesNV &
      operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceCornerSampledImageFeaturesNV &
      setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
    {
      cornerSampledImage = cornerSampledImage_;
      return *this;
    }

    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage );
    }

    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType              = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
    void *                              pNext              = {};
    VULKAN_HPP_NAMESPACE::Bool32        cornerSampledImage = {};
  };
  static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCornerSampledImageFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
  {
    using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
  };

  struct PhysicalDeviceCoverageReductionModeFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT
      : coverageReductionMode( coverageReductionMode_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(
      PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCoverageReductionModeFeaturesNV(
          *reinterpret_cast<PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV &
                            operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCoverageReductionModeFeaturesNV &
      operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceCoverageReductionModeFeaturesNV &
      setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageReductionMode = coverageReductionMode_;
      return *this;
    }

    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode );
    }

    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        coverageReductionMode = {};
  };
  static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
  {
    using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
  };
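
  // Illustrative usage sketch (not part of the generated API): the CppType specialization above maps the
  // StructureType enumerant back to the wrapper type, which can be handy in generic code that only has
  // the enum value available.
  //
  //   using CoverageReductionFeatures =
  //     vk::CppType<vk::StructureType,
  //                 vk::StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>::Type;
  //   static_assert(
  //     std::is_same<CoverageReductionFeatures, vk::PhysicalDeviceCoverageReductionModeFeaturesNV>::value,
  //     "CppType maps the enumerant to its wrapper struct" );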

  struct PhysicalDeviceCustomBorderColorFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_             = {},
      VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {} ) VULKAN_HPP_NOEXCEPT
      : customBorderColors( customBorderColors_ )
      , customBorderColorWithoutFormat( customBorderColorWithoutFormat_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(
      PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCustomBorderColorFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT &
                            operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCustomBorderColorFeaturesEXT &
      operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceCustomBorderColorFeaturesEXT &
      setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
    {
      customBorderColors = customBorderColors_;
      return *this;
    }

    PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat(
      VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
    {
      customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
      return *this;
    }

    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) &&
             ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
    }

    bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType              = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
    void *                              pNext              = {};
    VULKAN_HPP_NAMESPACE::Bool32        customBorderColors = {};
    VULKAN_HPP_NAMESPACE::Bool32        customBorderColorWithoutFormat = {};
  };
  static_assert( sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
  {
    using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
  };
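
  // Illustrative usage sketch (not part of the generated API): several feature structs can share one
  // pNext chain on DeviceCreateInfo. `deviceCreateInfo` is assumed to be set up as in the earlier
  // sketches, and VK_EXT_custom_border_color / VK_EXT_color_write_enable must also be listed among the
  // enabled extensions.
  //
  //   vk::PhysicalDeviceCustomBorderColorFeaturesEXT customBorderColorFeatures( VK_TRUE, VK_TRUE );
  //   vk::PhysicalDeviceColorWriteEnableFeaturesEXT  colorWriteEnableFeatures( VK_TRUE );
  //   customBorderColorFeatures.setPNext( &colorWriteEnableFeatures );
  //   deviceCreateInfo.setPNext( &customBorderColorFeatures );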

  struct PhysicalDeviceCustomBorderColorPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(
      PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceCustomBorderColorPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorPropertiesEXT &
                            operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceCustomBorderColorPropertiesEXT &
      operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
    }

    bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
    void *                              pNext = {};
    uint32_t                            maxCustomBorderColorSamplers = {};
  };
  static_assert( sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
  {
    using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
  };

  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT
      : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(
      PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(
      VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(
          *reinterpret_cast<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &
                            operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &
      operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(
        &rhs );
      return *this;
    }

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing(
      VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
    {
      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
      return *this;
    }

    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
    }

    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType =
      StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
    void *                       pNext                            = {};
    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
  };
  static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
  {
    using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
  };

  struct PhysicalDeviceDepthClipEnableFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
      : depthClipEnable( depthClipEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(
      PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthClipEnableFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT &
                            operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthClipEnableFeaturesEXT &
      operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceDepthClipEnableFeaturesEXT &
      setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipEnable = depthClipEnable_;
      return *this;
    }

    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable );
    }

    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
    void *                              pNext           = {};
    VULKAN_HPP_NAMESPACE::Bool32        depthClipEnable = {};
  };
  static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
  {
    using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
  };
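
  // A hedged sketch of enabling this feature at device creation (assumes VK_EXT_depth_clip_enable is
  // supported, queue create infos are filled in elsewhere, and default exception-based error handling):
  //
  //   vk::PhysicalDeviceDepthClipEnableFeaturesEXT depthClipFeatures;
  //   depthClipFeatures.setDepthClipEnable( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo;        // queue create infos etc. set elsewhere
  //   deviceCreateInfo.setPNext( &depthClipFeatures );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );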

  struct PhysicalDeviceDepthStencilResolveProperties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDepthStencilResolveProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(
      VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_   = {},
      VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {},
      VULKAN_HPP_NAMESPACE::Bool32           independentResolveNone_       = {},
      VULKAN_HPP_NAMESPACE::Bool32           independentResolve_           = {} ) VULKAN_HPP_NOEXCEPT
      : supportedDepthResolveModes( supportedDepthResolveModes_ )
      , supportedStencilResolveModes( supportedStencilResolveModes_ )
      , independentResolveNone( independentResolveNone_ )
      , independentResolve( independentResolve_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(
      PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDepthStencilResolveProperties(
          *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthStencilResolveProperties &
                            operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDepthStencilResolveProperties &
      operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties *>( this );
    }

    operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) &&
             ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) &&
             ( independentResolveNone == rhs.independentResolveNone ) &&
             ( independentResolve == rhs.independentResolve );
    }

    bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
    void *                                 pNext = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes   = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
    VULKAN_HPP_NAMESPACE::Bool32           independentResolveNone       = {};
    VULKAN_HPP_NAMESPACE::Bool32           independentResolve           = {};
  };
  static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) ==
                   sizeof( VkPhysicalDeviceDepthStencilResolveProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolveProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
  {
    using Type = PhysicalDeviceDepthStencilResolveProperties;
  };
  using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
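
  // A hedged sketch of querying these limits through a StructureChain (assumes Vulkan 1.2 or
  // VK_KHR_depth_stencil_resolve, and an assumed physicalDevice handle):
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDepthStencilResolveProperties>();
  //   auto const & resolveProps = chain.get<vk::PhysicalDeviceDepthStencilResolveProperties>();
  //   if ( resolveProps.supportedDepthResolveModes & vk::ResolveModeFlagBits::eSampleZero )
  //   {
  //     // eSampleZero is always reported; eAverage / eMin / eMax are optional modes
  //   }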

  struct PhysicalDeviceDescriptorIndexingFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDescriptorIndexingFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_         = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_         = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_    = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_    = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_                    = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_           = {},
      VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_                             = {} ) VULKAN_HPP_NOEXCEPT
      : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
      , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
      , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
      , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
      , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
      , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
      , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
      , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
      , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
      , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
      , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
      , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
      , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
      , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
      , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
      , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
      , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
      , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
      , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
      , runtimeDescriptorArray( runtimeDescriptorArray_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(
      PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorIndexingFeatures(
          *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures &
                            operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingFeatures &
      operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
      return *this;
    }

    PhysicalDeviceDescriptorIndexingFeatures &
      setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
    {
      runtimeDescriptorArray = runtimeDescriptorArray_;
      return *this;
    }

    operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) &&
             ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) &&
             ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) &&
             ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) &&
             ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) &&
             ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
             ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
             ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
             ( shaderUniformTexelBufferArrayNonUniformIndexing ==
               rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
             ( shaderStorageTexelBufferArrayNonUniformIndexing ==
               rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
             ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
             ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
             ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
             ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
             ( descriptorBindingUniformTexelBufferUpdateAfterBind ==
               rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
             ( descriptorBindingStorageTexelBufferUpdateAfterBind ==
               rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
             ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
             ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
             ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
             ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
    }

    bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderInputAttachmentArrayDynamicIndexing          = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformTexelBufferArrayDynamicIndexing       = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageTexelBufferArrayDynamicIndexing       = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformBufferArrayNonUniformIndexing         = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSampledImageArrayNonUniformIndexing          = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageBufferArrayNonUniformIndexing         = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageImageArrayNonUniformIndexing          = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderInputAttachmentArrayNonUniformIndexing       = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformTexelBufferArrayNonUniformIndexing    = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageTexelBufferArrayNonUniformIndexing    = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingUniformBufferUpdateAfterBind      = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingSampledImageUpdateAfterBind       = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingStorageImageUpdateAfterBind       = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingStorageBufferUpdateAfterBind      = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingUniformTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingStorageTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingUpdateUnusedWhilePending          = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingPartiallyBound                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingVariableDescriptorCount           = {};
    VULKAN_HPP_NAMESPACE::Bool32        runtimeDescriptorArray                             = {};
  };
  static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) ==
                   sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
  {
    using Type = PhysicalDeviceDescriptorIndexingFeatures;
  };
  using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
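
  // A hedged sketch (not generated code): requesting a subset of descriptor indexing features at
  // device creation. The struct is chained behind vk::PhysicalDeviceFeatures2 so core features can
  // be enabled alongside it; queue create infos are assumed to be set elsewhere.
  //
  //   vk::PhysicalDeviceDescriptorIndexingFeatures indexingFeatures;
  //   indexingFeatures.setDescriptorBindingPartiallyBound( VK_TRUE )
  //                   .setRuntimeDescriptorArray( VK_TRUE )
  //                   .setShaderSampledImageArrayNonUniformIndexing( VK_TRUE );
  //   vk::PhysicalDeviceFeatures2 features2;
  //   features2.setPNext( &indexingFeatures );
  //   vk::DeviceCreateInfo deviceCreateInfo;    // queue create infos set elsewhere
  //   deviceCreateInfo.setPNext( &features2 );  // leave pEnabledFeatures null when chaining features2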

  struct PhysicalDeviceDescriptorIndexingProperties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDescriptorIndexingProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(
      uint32_t                     maxUpdateAfterBindDescriptorsInAllPools_              = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_     = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_     = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_   = {},
      VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_                    = {},
      VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_                             = {},
      uint32_t                     maxPerStageDescriptorUpdateAfterBindSamplers_         = {},
      uint32_t                     maxPerStageDescriptorUpdateAfterBindUniformBuffers_   = {},
      uint32_t                     maxPerStageDescriptorUpdateAfterBindStorageBuffers_   = {},
      uint32_t                     maxPerStageDescriptorUpdateAfterBindSampledImages_    = {},
      uint32_t                     maxPerStageDescriptorUpdateAfterBindStorageImages_    = {},
      uint32_t                     maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
      uint32_t                     maxPerStageUpdateAfterBindResources_                  = {},
      uint32_t                     maxDescriptorSetUpdateAfterBindSamplers_              = {},
      uint32_t                     maxDescriptorSetUpdateAfterBindUniformBuffers_        = {},
      uint32_t                     maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
      uint32_t                     maxDescriptorSetUpdateAfterBindStorageBuffers_        = {},
      uint32_t                     maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
      uint32_t                     maxDescriptorSetUpdateAfterBindSampledImages_         = {},
      uint32_t                     maxDescriptorSetUpdateAfterBindStorageImages_         = {},
      uint32_t                     maxDescriptorSetUpdateAfterBindInputAttachments_      = {} ) VULKAN_HPP_NOEXCEPT
      : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
      , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
      , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
      , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
      , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
      , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
      , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
      , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
      , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
      , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
      , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
      , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
      , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
      , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
      , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
      , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
      , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
      , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
      , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
      , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
      , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(
      PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDescriptorIndexingProperties(
          *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingProperties &
                            operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDescriptorIndexingProperties &
      operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties *>( this );
    }

    operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) &&
             ( shaderUniformBufferArrayNonUniformIndexingNative ==
               rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
             ( shaderSampledImageArrayNonUniformIndexingNative ==
               rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
             ( shaderStorageBufferArrayNonUniformIndexingNative ==
               rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
             ( shaderStorageImageArrayNonUniformIndexingNative ==
               rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
             ( shaderInputAttachmentArrayNonUniformIndexingNative ==
               rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
             ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) &&
             ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
             ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) &&
             ( maxPerStageDescriptorUpdateAfterBindUniformBuffers ==
               rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageBuffers ==
               rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindSampledImages ==
               rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageImages ==
               rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindInputAttachments ==
               rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
             ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) &&
             ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ==
               rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ==
               rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) &&
             ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) &&
             ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
    }

    bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
    void *                              pNext = {};
    uint32_t                            maxUpdateAfterBindDescriptorsInAllPools              = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformBufferArrayNonUniformIndexingNative     = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSampledImageArrayNonUniformIndexingNative      = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageBufferArrayNonUniformIndexingNative     = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageImageArrayNonUniformIndexingNative      = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderInputAttachmentArrayNonUniformIndexingNative   = {};
    VULKAN_HPP_NAMESPACE::Bool32        robustBufferAccessUpdateAfterBind                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        quadDivergentImplicitLod                             = {};
    uint32_t                            maxPerStageDescriptorUpdateAfterBindSamplers         = {};
    uint32_t                            maxPerStageDescriptorUpdateAfterBindUniformBuffers   = {};
    uint32_t                            maxPerStageDescriptorUpdateAfterBindStorageBuffers   = {};
    uint32_t                            maxPerStageDescriptorUpdateAfterBindSampledImages    = {};
    uint32_t                            maxPerStageDescriptorUpdateAfterBindStorageImages    = {};
    uint32_t                            maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
    uint32_t                            maxPerStageUpdateAfterBindResources                  = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindSamplers              = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindUniformBuffers        = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindStorageBuffers        = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindSampledImages         = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindStorageImages         = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindInputAttachments      = {};
  };
  static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) ==
                   sizeof( VkPhysicalDeviceDescriptorIndexingProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
  {
    using Type = PhysicalDeviceDescriptorIndexingProperties;
  };
  using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
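
  // A hedged sketch of reading the corresponding limits (Vulkan 1.2 or VK_EXT_descriptor_indexing),
  // using an assumed physicalDevice handle:
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDescriptorIndexingProperties>();
  //   auto const & indexingProps = chain.get<vk::PhysicalDeviceDescriptorIndexingProperties>();
  //   uint32_t maxBindlessImages = indexingProps.maxDescriptorSetUpdateAfterBindSampledImages;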

  struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceGeneratedCommands( deviceGeneratedCommands_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(
      PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(
          *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &
                            operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &
      operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceDeviceGeneratedCommandsFeaturesNV &
      setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceGeneratedCommands = deviceGeneratedCommands_;
      return *this;
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
    }

    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        deviceGeneratedCommands = {};
  };
  static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
  };
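
  // A hedged sketch: checking for VK_NV_device_generated_commands support before taking the
  // device-generated commands path (physicalDevice is an assumed handle):
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>();
  //   bool dgcSupported =
  //     chain.get<vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>().deviceGeneratedCommands == VK_TRUE;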

  struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(
      uint32_t maxGraphicsShaderGroupCount_              = {},
      uint32_t maxIndirectSequenceCount_                 = {},
      uint32_t maxIndirectCommandsTokenCount_            = {},
      uint32_t maxIndirectCommandsStreamCount_           = {},
      uint32_t maxIndirectCommandsTokenOffset_           = {},
      uint32_t maxIndirectCommandsStreamStride_          = {},
      uint32_t minSequencesCountBufferOffsetAlignment_   = {},
      uint32_t minSequencesIndexBufferOffsetAlignment_   = {},
      uint32_t minIndirectCommandsBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ )
      , maxIndirectSequenceCount( maxIndirectSequenceCount_ )
      , maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ )
      , maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ )
      , maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ )
      , maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ )
      , minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ )
      , minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ )
      , minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(
      PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(
          *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsPropertiesNV &
                            operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceGeneratedCommandsPropertiesNV &
      operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) &&
             ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) &&
             ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) &&
             ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) &&
             ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) &&
             ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) &&
             ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) &&
             ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) &&
             ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment );
    }

    bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
    void *                              pNext = {};
    uint32_t                            maxGraphicsShaderGroupCount              = {};
    uint32_t                            maxIndirectSequenceCount                 = {};
    uint32_t                            maxIndirectCommandsTokenCount            = {};
    uint32_t                            maxIndirectCommandsStreamCount           = {};
    uint32_t                            maxIndirectCommandsTokenOffset           = {};
    uint32_t                            maxIndirectCommandsStreamStride          = {};
    uint32_t                            minSequencesCountBufferOffsetAlignment   = {};
    uint32_t                            minSequencesIndexBufferOffsetAlignment   = {};
    uint32_t                            minIndirectCommandsBufferOffsetAlignment = {};
  };
  static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) ==
                   sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
  {
    using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
  };
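
  // A hedged sketch of reading these limits before building indirect command streams
  // (physicalDevice is an assumed handle):
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>();
  //   auto const & dgcProps          = chain.get<vk::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>();
  //   uint32_t     streamStrideLimit = dgcProps.maxIndirectCommandsStreamStride;
  //   uint32_t     bufferOffsetAlign = dgcProps.minIndirectCommandsBufferOffsetAlignment;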

  struct PhysicalDeviceDeviceMemoryReportFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceMemoryReport( deviceMemoryReport_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(
      PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDeviceMemoryReportFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT &
                            operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDeviceMemoryReportFeaturesEXT &
      operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceDeviceMemoryReportFeaturesEXT &
      setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT
    {
      deviceMemoryReport = deviceMemoryReport_;
      return *this;
    }

    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport );
    }

    bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        deviceMemoryReport = {};
  };
  static_assert( sizeof( PhysicalDeviceDeviceMemoryReportFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT>
  {
    using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT;
  };
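
  // A hedged sketch: VK_EXT_device_memory_report is gated on this feature; an application would
  // typically check it before chaining a vk::DeviceDeviceMemoryReportCreateInfoEXT into device
  // creation (physicalDevice is an assumed handle):
  //
  //   vk::PhysicalDeviceFeatures2                     features2;
  //   vk::PhysicalDeviceDeviceMemoryReportFeaturesEXT memoryReportFeatures;
  //   features2.pNext = &memoryReportFeatures;
  //   physicalDevice.getFeatures2( &features2 );
  //   bool canReport = ( memoryReportFeatures.deviceMemoryReport == VK_TRUE );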

  struct PhysicalDeviceDiagnosticsConfigFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {} ) VULKAN_HPP_NOEXCEPT
      : diagnosticsConfig( diagnosticsConfig_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(
      PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDiagnosticsConfigFeaturesNV(
          *reinterpret_cast<PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV &
                            operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiagnosticsConfigFeaturesNV &
      operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceDiagnosticsConfigFeaturesNV &
      setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
    {
      diagnosticsConfig = diagnosticsConfig_;
      return *this;
    }

    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig );
    }

    bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
    void *                              pNext             = {};
    VULKAN_HPP_NAMESPACE::Bool32        diagnosticsConfig = {};
  };
  static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDiagnosticsConfigFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
  {
    using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
  };
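
  // A hedged sketch: this feature gates VK_NV_device_diagnostics_config (e.g. chaining a
  // vk::DeviceDiagnosticsConfigCreateInfoNV into device creation). Query, assuming physicalDevice:
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceDiagnosticsConfigFeaturesNV>();
  //   bool diagnosticsAvailable =
  //     chain.get<vk::PhysicalDeviceDiagnosticsConfigFeaturesNV>().diagnosticsConfig == VK_TRUE;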

  struct PhysicalDeviceDiscardRectanglePropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxDiscardRectangles( maxDiscardRectangles_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(
      PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDiscardRectanglePropertiesEXT(
          *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiscardRectanglePropertiesEXT &
                            operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDiscardRectanglePropertiesEXT &
      operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
    }

    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
    void *                              pNext = {};
    uint32_t                            maxDiscardRectangles = {};
  };
  static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
  {
    using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
  };
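  // Illustrative note (not part of the generated API): like the other struct wrappers in this
  // header, PhysicalDeviceDiscardRectanglePropertiesEXT is layout-compatible with its C counterpart
  // and converts implicitly where the C struct is expected, using the conversion operators defined
  // above. Assuming a wrapper object `discardRectangleProps` that has already been filled in:
  //
  //   vk::PhysicalDeviceDiscardRectanglePropertiesEXT discardRectangleProps;
  //   VkPhysicalDeviceDiscardRectanglePropertiesEXT const & native = discardRectangleProps;  // const conversion operator
  //   uint32_t maxRects = native.maxDiscardRectangles;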

  struct PhysicalDeviceDriverProperties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(
      VULKAN_HPP_NAMESPACE::DriverId                    driverID_   = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
      std::array<char, VK_MAX_DRIVER_NAME_SIZE> const & driverName_ = {},
      std::array<char, VK_MAX_DRIVER_INFO_SIZE> const & driverInfo_ = {},
      VULKAN_HPP_NAMESPACE::ConformanceVersion          conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT
      : driverID( driverID_ )
      , driverName( driverName_ )
      , driverInfo( driverInfo_ )
      , conformanceVersion( conformanceVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties &
                            operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceDriverProperties *>( this );
    }

    operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceDriverProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) &&
             ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) &&
             ( conformanceVersion == rhs.conformanceVersion );
    }

    bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::ePhysicalDeviceDriverProperties;
    void *                              pNext    = {};
    VULKAN_HPP_NAMESPACE::DriverId      driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName         = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo         = {};
    VULKAN_HPP_NAMESPACE::ConformanceVersion                            conformanceVersion = {};
  };
  static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceDriverProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
  {
    using Type = PhysicalDeviceDriverProperties;
  };
  using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
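  // Illustrative usage sketch (not part of the generated API): PhysicalDeviceDriverProperties is a
  // query-only structure that is typically filled in by chaining it into PhysicalDeviceProperties2.
  // The snippet assumes the default `vk` namespace alias, a valid vk::PhysicalDevice named
  // `physicalDevice`, and Vulkan 1.2 (or VK_KHR_driver_properties) support.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceDriverProperties>();
  //   vk::PhysicalDeviceDriverProperties const & driverProps = chain.get<vk::PhysicalDeviceDriverProperties>();
  //   vk::DriverId   id   = driverProps.driverID;
  //   char const *   name = driverProps.driverName.data();  // fixed-size char array wrapper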

  struct PhysicalDeviceExclusiveScissorFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} )
      VULKAN_HPP_NOEXCEPT : exclusiveScissor( exclusiveScissor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(
      PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExclusiveScissorFeaturesNV(
          *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV &
                            operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExclusiveScissorFeaturesNV &
      operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExclusiveScissorFeaturesNV &
      setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
    {
      exclusiveScissor = exclusiveScissor_;
      return *this;
    }

    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor );
    }

    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
    void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::Bool32        exclusiveScissor = {};
  };
  static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExclusiveScissorFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
  {
    using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
  };
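  // Illustrative usage sketch (not part of the generated API): feature structures such as
  // PhysicalDeviceExclusiveScissorFeaturesNV are usually queried by chaining them into
  // PhysicalDeviceFeatures2. Assuming the default `vk` namespace and a valid vk::PhysicalDevice
  // named `physicalDevice`:
  //
  //   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                               vk::PhysicalDeviceExclusiveScissorFeaturesNV>();
  //   bool exclusiveScissorSupported =
  //     features.get<vk::PhysicalDeviceExclusiveScissorFeaturesNV>().exclusiveScissor == VK_TRUE;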

  struct PhysicalDeviceExtendedDynamicState2FeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_            = {},
      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT
      : extendedDynamicState2( extendedDynamicState2_ )
      , extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ )
      , extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT(
      PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExtendedDynamicState2FeaturesEXT(
          *reinterpret_cast<PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT &
                            operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExtendedDynamicState2FeaturesEXT &
      operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExtendedDynamicState2FeaturesEXT &
      setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState2 = extendedDynamicState2_;
      return *this;
    }

    PhysicalDeviceExtendedDynamicState2FeaturesEXT &
      setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_;
      return *this;
    }

    PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints(
      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_;
      return *this;
    }

    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( extendedDynamicState2 == rhs.extendedDynamicState2 ) &&
             ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) &&
             ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints );
    }

    bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        extendedDynamicState2                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        extendedDynamicState2LogicOp            = {};
    VULKAN_HPP_NAMESPACE::Bool32        extendedDynamicState2PatchControlPoints = {};
  };
  static_assert( sizeof( PhysicalDeviceExtendedDynamicState2FeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
  };
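  // Illustrative usage sketch (not part of the generated API): to request extended dynamic state 2
  // features at device creation, the structure can be filled via its chained setters and hooked
  // into the DeviceCreateInfo pNext chain. Queue setup and extension enabling are omitted here;
  // `physicalDevice` is an assumed, valid vk::PhysicalDevice.
  //
  //   vk::PhysicalDeviceExtendedDynamicState2FeaturesEXT eds2Features;
  //   eds2Features.setExtendedDynamicState2( VK_TRUE ).setExtendedDynamicState2LogicOp( VK_TRUE );
  //
  //   vk::DeviceCreateInfo deviceCreateInfo;       // queue create infos etc. omitted
  //   deviceCreateInfo.setPNext( &eds2Features );  // VK_EXT_extended_dynamic_state2 must also be enabled
  //   // vk::Device device = physicalDevice.createDevice( deviceCreateInfo );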

  struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT
      : extendedDynamicState( extendedDynamicState_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(
      PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExtendedDynamicStateFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT &
                            operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExtendedDynamicStateFeaturesEXT &
      operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExtendedDynamicStateFeaturesEXT &
      setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      extendedDynamicState = extendedDynamicState_;
      return *this;
    }

    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState );
    }

    bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        extendedDynamicState = {};
  };
  static_assert( sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
  };

  struct PhysicalDeviceExternalImageFormatInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceExternalImageFormatInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ =
        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
      : handleType( handleType_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalImageFormatInfo(
          *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo &
                            operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalImageFormatInfo &
      operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
      return *this;
    }

    PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceExternalImageFormatInfo &
      setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
    {
      handleType = handleType_;
      return *this;
    }

    operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo *>( this );
    }

    operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType );
    }

    bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType =
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  };
  static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
  {
    using Type = PhysicalDeviceExternalImageFormatInfo;
  };
  using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
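  // Illustrative usage sketch (not part of the generated API): PhysicalDeviceExternalImageFormatInfo
  // is an input structure that extends PhysicalDeviceImageFormatInfo2; the matching output arrives
  // in ExternalImageFormatProperties. Assuming the default `vk` namespace and a valid
  // vk::PhysicalDevice named `physicalDevice`:
  //
  //   vk::StructureChain<vk::PhysicalDeviceImageFormatInfo2, vk::PhysicalDeviceExternalImageFormatInfo> info(
  //     vk::PhysicalDeviceImageFormatInfo2( vk::Format::eR8G8B8A8Unorm,
  //                                         vk::ImageType::e2D,
  //                                         vk::ImageTiling::eOptimal,
  //                                         vk::ImageUsageFlagBits::eSampled ),
  //     vk::PhysicalDeviceExternalImageFormatInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );
  //
  //   auto props = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
  //                                                         vk::ExternalImageFormatProperties>(
  //     info.get<vk::PhysicalDeviceImageFormatInfo2>() );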

  struct PhysicalDeviceExternalMemoryHostPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(
      VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
      : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(
      PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceExternalMemoryHostPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryHostPropertiesEXT &
                            operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceExternalMemoryHostPropertiesEXT &
      operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
    }

    bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    minImportedHostPointerAlignment = {};
  };
  static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
  {
    using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
  };

  struct PhysicalDeviceFloatControlsProperties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFloatControlsProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ =
        VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ =
        VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_           = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_           = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_           = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_        = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_        = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_        = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_          = {} ) VULKAN_HPP_NOEXCEPT
      : denormBehaviorIndependence( denormBehaviorIndependence_ )
      , roundingModeIndependence( roundingModeIndependence_ )
      , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
      , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
      , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
      , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
      , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
      , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
      , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
      , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
      , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
      , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
      , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
      , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
      , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
      , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
      , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFloatControlsProperties(
          *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFloatControlsProperties &
                            operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFloatControlsProperties &
      operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties *>( this );
    }

    operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) &&
             ( roundingModeIndependence == rhs.roundingModeIndependence ) &&
             ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) &&
             ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) &&
             ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) &&
             ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) &&
             ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
             ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) &&
             ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
             ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) &&
             ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) &&
             ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
             ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) &&
             ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
             ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) &&
             ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
             ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
    }

    bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType = StructureType::ePhysicalDeviceFloatControlsProperties;
    void *                                                pNext = {};
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence =
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence =
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16           = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32           = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64           = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16        = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32        = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64        = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16          = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32          = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64          = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16          = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32          = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64          = {};
  };
  static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
  {
    using Type = PhysicalDeviceFloatControlsProperties;
  };
  using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;

  struct PhysicalDeviceFragmentDensityMap2FeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {} ) VULKAN_HPP_NOEXCEPT
      : fragmentDensityMapDeferred( fragmentDensityMapDeferred_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(
      PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMap2FeaturesEXT(
          *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT &
                            operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMap2FeaturesEXT &
      operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFragmentDensityMap2FeaturesEXT &
      setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
      return *this;
    }

    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
    }

    bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentDensityMapDeferred = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
  };

  struct PhysicalDeviceFragmentDensityMap2PropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_                           = {},
      VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {},
      uint32_t                     maxSubsampledArrayLayers_                  = {},
      uint32_t                     maxDescriptorSetSubsampledSamplers_        = {} ) VULKAN_HPP_NOEXCEPT
      : subsampledLoads( subsampledLoads_ )
      , subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ )
      , maxSubsampledArrayLayers( maxSubsampledArrayLayers_ )
      , maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(
      PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMap2PropertiesEXT(
          *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2PropertiesEXT &
                            operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMap2PropertiesEXT &
      operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) &&
             ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) &&
             ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) &&
             ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
    }

    bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        subsampledLoads                           = {};
    VULKAN_HPP_NAMESPACE::Bool32        subsampledCoarseReconstructionEarlyAccess = {};
    uint32_t                            maxSubsampledArrayLayers                  = {};
    uint32_t                            maxDescriptorSetSubsampledSamplers        = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
  };

  struct PhysicalDeviceFragmentDensityMapFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_                    = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_             = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT
      : fragmentDensityMap( fragmentDensityMap_ )
      , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ )
      , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(
      PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMapFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT &
                            operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMapFeaturesEXT &
      operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFragmentDensityMapFeaturesEXT &
      setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMap = fragmentDensityMap_;
      return *this;
    }

    PhysicalDeviceFragmentDensityMapFeaturesEXT &
      setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapDynamic = fragmentDensityMapDynamic_;
      return *this;
    }

    PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages(
      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_;
      return *this;
    }

    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) &&
             ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) &&
             ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentDensityMap                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentDensityMapDynamic             = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentDensityMapNonSubsampledImages = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT;
  };
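  // Illustrative usage sketch (not part of the generated API): instead of wiring pNext pointers by
  // hand, a StructureChain can bundle DeviceCreateInfo with one or more feature structures. Here
  // `queueCreateInfos` (a container of vk::DeviceQueueCreateInfo) and `physicalDevice` are assumed
  // to exist, and VK_EXT_fragment_density_map must be enabled on the device.
  //
  //   vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDeviceFragmentDensityMapFeaturesEXT> createChain(
  //     vk::DeviceCreateInfo{}.setQueueCreateInfos( queueCreateInfos ),
  //     vk::PhysicalDeviceFragmentDensityMapFeaturesEXT{}.setFragmentDensityMap( VK_TRUE ) );
  //   // vk::UniqueDevice device = physicalDevice.createDeviceUnique( createChain.get<vk::DeviceCreateInfo>() );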

  struct PhysicalDeviceFragmentDensityMapPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(
      VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {},
      VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {},
      VULKAN_HPP_NAMESPACE::Bool32   fragmentDensityInvocations_  = {} ) VULKAN_HPP_NOEXCEPT
      : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ )
      , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ )
      , fragmentDensityInvocations( fragmentDensityInvocations_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(
      PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentDensityMapPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapPropertiesEXT &
                            operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentDensityMapPropertiesEXT &
      operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) &&
             ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) &&
             ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
    }

    bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D      minFragmentDensityTexelSize = {};
    VULKAN_HPP_NAMESPACE::Extent2D      maxFragmentDensityTexelSize = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentDensityInvocations  = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT>
  {
    using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
  };

  struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT
      : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(
      PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShaderBarycentricFeaturesNV(
          *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV &
                            operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV &
      operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFragmentShaderBarycentricFeaturesNV &
      setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderBarycentric = fragmentShaderBarycentric_;
      return *this;
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
    }

    bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentShaderBarycentric = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV>
  {
    using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
  };

  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT
      : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
      , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ )
      , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(
      PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShaderInterlockFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
                            operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
      operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock(
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
      return *this;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT &
      setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
      return *this;
    }

    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock(
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
      return *this;
    }

    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) &&
             ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) &&
             ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
    }

    bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentShaderSampleInterlock      = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentShaderPixelInterlock       = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentShaderShadingRateInterlock = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
  {
    using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
  };
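
  // Illustrative usage sketch (comment only, not part of the generated API): feature structs such as
  // PhysicalDeviceFragmentShaderInterlockFeaturesEXT are queried by chaining them behind
  // PhysicalDeviceFeatures2. Assuming the default "vk" namespace, the default dispatcher, a
  // physicalDevice handle, and support for VK_EXT_fragment_shader_interlock:
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>();
  //   vk::Bool32 sampleInterlock =
  //     chain.get<vk::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>().fragmentShaderSampleInterlock;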

  struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_         = {},
      VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_  = {},
      VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {} ) VULKAN_HPP_NOEXCEPT
      : fragmentShadingRateEnums( fragmentShadingRateEnums_ )
      , supersampleFragmentShadingRates( supersampleFragmentShadingRates_ )
      , noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(
      PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(
          *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
                            operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
      operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV &
      setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentShadingRateEnums = fragmentShadingRateEnums_;
      return *this;
    }

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates(
      VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
    {
      supersampleFragmentShadingRates = supersampleFragmentShadingRates_;
      return *this;
    }

    PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates(
      VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
    {
      noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_;
      return *this;
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) &&
             ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) &&
             ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates );
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        fragmentShadingRateEnums         = {};
    VULKAN_HPP_NAMESPACE::Bool32        supersampleFragmentShadingRates  = {};
    VULKAN_HPP_NAMESPACE::Bool32        noInvocationFragmentShadingRates = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
  {
    using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
  };

  struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ =
        VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1 ) VULKAN_HPP_NOEXCEPT
      : maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
      PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
      VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(
          *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &
                            operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV &
      operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setMaxFragmentShadingRateInvocationCount(
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_;
      return *this;
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount );
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount =
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  };
  static_assert( sizeof( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) ==
                   sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
  {
    using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
  };

  struct PhysicalDeviceFragmentShadingRateFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_   = {},
      VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_  = {},
      VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {} ) VULKAN_HPP_NOEXCEPT
      : pipelineFragmentShadingRate( pipelineFragmentShadingRate_ )
      , primitiveFragmentShadingRate( primitiveFragmentShadingRate_ )
      , attachmentFragmentShadingRate( attachmentFragmentShadingRate_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(
      PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRateFeaturesKHR(
          *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR &
                            operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRateFeaturesKHR &
      operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceFragmentShadingRateFeaturesKHR &
      setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
      return *this;
    }

    PhysicalDeviceFragmentShadingRateFeaturesKHR &
      setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
      return *this;
    }

    PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate(
      VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
      return *this;
    }

    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) &&
             ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) &&
             ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
    }

    bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        pipelineFragmentShadingRate   = {};
    VULKAN_HPP_NAMESPACE::Bool32        primitiveFragmentShadingRate  = {};
    VULKAN_HPP_NAMESPACE::Bool32        attachmentFragmentShadingRate = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentShadingRateFeaturesKHR ) ==
                   sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR;
  };

  struct PhysicalDeviceFragmentShadingRatePropertiesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(
      VULKAN_HPP_NAMESPACE::Extent2D            minFragmentShadingRateAttachmentTexelSize_            = {},
      VULKAN_HPP_NAMESPACE::Extent2D            maxFragmentShadingRateAttachmentTexelSize_            = {},
      uint32_t                                  maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {},
      VULKAN_HPP_NAMESPACE::Bool32              primitiveFragmentShadingRateWithMultipleViewports_    = {},
      VULKAN_HPP_NAMESPACE::Bool32              layeredShadingRateAttachments_                        = {},
      VULKAN_HPP_NAMESPACE::Bool32              fragmentShadingRateNonTrivialCombinerOps_             = {},
      VULKAN_HPP_NAMESPACE::Extent2D            maxFragmentSize_                                      = {},
      uint32_t                                  maxFragmentSizeAspectRatio_                           = {},
      uint32_t                                  maxFragmentShadingRateCoverageSamples_                = {},
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ =
        VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_  = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_                = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_   = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_     = {},
      VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_        = {} ) VULKAN_HPP_NOEXCEPT
      : minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ )
      , maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ )
      , maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ )
      , primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ )
      , layeredShadingRateAttachments( layeredShadingRateAttachments_ )
      , fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ )
      , maxFragmentSize( maxFragmentSize_ )
      , maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ )
      , maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ )
      , maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ )
      , fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ )
      , fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ )
      , fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ )
      , fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ )
      , fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ )
      , fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ )
      , fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(
      PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceFragmentShadingRatePropertiesKHR(
          *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRatePropertiesKHR &
                            operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceFragmentShadingRatePropertiesKHR &
      operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) &&
             ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) &&
             ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio ==
               rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) &&
             ( primitiveFragmentShadingRateWithMultipleViewports ==
               rhs.primitiveFragmentShadingRateWithMultipleViewports ) &&
             ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) &&
             ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) &&
             ( maxFragmentSize == rhs.maxFragmentSize ) &&
             ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) &&
             ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) &&
             ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) &&
             ( fragmentShadingRateWithShaderDepthStencilWrites ==
               rhs.fragmentShadingRateWithShaderDepthStencilWrites ) &&
             ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) &&
             ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) &&
             ( fragmentShadingRateWithConservativeRasterization ==
               rhs.fragmentShadingRateWithConservativeRasterization ) &&
             ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) &&
             ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) &&
             ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner );
    }

    bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
    void *                                    pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D            minFragmentShadingRateAttachmentTexelSize            = {};
    VULKAN_HPP_NAMESPACE::Extent2D            maxFragmentShadingRateAttachmentTexelSize            = {};
    uint32_t                                  maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {};
    VULKAN_HPP_NAMESPACE::Bool32              primitiveFragmentShadingRateWithMultipleViewports    = {};
    VULKAN_HPP_NAMESPACE::Bool32              layeredShadingRateAttachments                        = {};
    VULKAN_HPP_NAMESPACE::Bool32              fragmentShadingRateNonTrivialCombinerOps             = {};
    VULKAN_HPP_NAMESPACE::Extent2D            maxFragmentSize                                      = {};
    uint32_t                                  maxFragmentSizeAspectRatio                           = {};
    uint32_t                                  maxFragmentShadingRateCoverageSamples                = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples =
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites  = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask                = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask          = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock   = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations     = {};
    VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner        = {};
  };
  static_assert( sizeof( PhysicalDeviceFragmentShadingRatePropertiesKHR ) ==
                   sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRatePropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR>
  {
    using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
  };
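
  // Illustrative usage sketch (comment only, not part of the generated API): limits like the ones
  // above are read back through PhysicalDeviceProperties2. Assuming the default "vk" namespace and
  // dispatcher, a physicalDevice handle, and support for VK_KHR_fragment_shading_rate:
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceFragmentShadingRatePropertiesKHR>();
  //   vk::Extent2D maxFragmentSize =
  //     chain.get<vk::PhysicalDeviceFragmentShadingRatePropertiesKHR>().maxFragmentSize;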

  struct PhysicalDeviceGlobalPriorityQueryFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {} ) VULKAN_HPP_NOEXCEPT
      : globalPriorityQuery( globalPriorityQuery_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesEXT(
      PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceGlobalPriorityQueryFeaturesEXT( VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceGlobalPriorityQueryFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceGlobalPriorityQueryFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesEXT &
                            operator=( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceGlobalPriorityQueryFeaturesEXT &
      operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceGlobalPriorityQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceGlobalPriorityQueryFeaturesEXT &
      setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      globalPriorityQuery = globalPriorityQuery_;
      return *this;
    }

    operator VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery );
    }

    bool operator!=( PhysicalDeviceGlobalPriorityQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        globalPriorityQuery = {};
  };
  static_assert( sizeof( PhysicalDeviceGlobalPriorityQueryFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceGlobalPriorityQueryFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesEXT>
  {
    using Type = PhysicalDeviceGlobalPriorityQueryFeaturesEXT;
  };

  struct PhysicalDeviceGroupProperties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(
      uint32_t                                                                           physicalDeviceCount_ = {},
      std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const & physicalDevices_     = {},
      VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
      : physicalDeviceCount( physicalDeviceCount_ )
      , physicalDevices( physicalDevices_ )
      , subsetAllocation( subsetAllocation_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceGroupProperties( *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties &
                            operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties *>( this );
    }

    operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceGroupProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
             ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation );
    }

    bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::ePhysicalDeviceGroupProperties;
    void *                              pNext               = {};
    uint32_t                            physicalDeviceCount = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE>
                                 physicalDevices  = {};
    VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
  };
  static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceGroupProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
  {
    using Type = PhysicalDeviceGroupProperties;
  };
  using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
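
  // Illustrative usage sketch (comment only, not part of the generated API): this struct is filled
  // in by Instance::enumeratePhysicalDeviceGroups (Vulkan 1.1). Assuming the default "vk" namespace
  // and dispatcher, enhanced mode, and an existing instance handle:
  //
  //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
  //   for ( auto const & group : groups )
  //   {
  //     // only the first group.physicalDeviceCount entries of group.physicalDevices are valid
  //   }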

  struct PhysicalDeviceHostQueryResetFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceHostQueryResetFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT
      : hostQueryReset( hostQueryReset_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures &
                            operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceHostQueryResetFeatures &
      operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceHostQueryResetFeatures &
      setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
    {
      hostQueryReset = hostQueryReset_;
      return *this;
    }

    operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures *>( this );
    }

    operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset );
    }

    bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::ePhysicalDeviceHostQueryResetFeatures;
    void *                              pNext          = {};
    VULKAN_HPP_NAMESPACE::Bool32        hostQueryReset = {};
  };
  static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceHostQueryResetFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
  {
    using Type = PhysicalDeviceHostQueryResetFeatures;
  };
  using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
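
  // Illustrative usage sketch (comment only, not part of the generated API): besides being queried,
  // feature structs are chained into DeviceCreateInfo::pNext to enable the feature at device
  // creation. Assuming the default "vk" namespace, enhanced mode with smart handles, and an already
  // populated vk::DeviceQueueCreateInfo named queueCreateInfo:
  //
  //   vk::PhysicalDeviceHostQueryResetFeatures hostQueryResetFeatures( VK_TRUE );
  //   vk::DeviceCreateInfo                     deviceCreateInfo( {}, queueCreateInfo );
  //   deviceCreateInfo.pNext = &hostQueryResetFeatures;
  //   vk::UniqueDevice device = physicalDevice.createDeviceUnique( deviceCreateInfo );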

  struct PhysicalDeviceIDProperties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceIDProperties( std::array<uint8_t, VK_UUID_SIZE> const & deviceUUID_      = {},
                                  std::array<uint8_t, VK_UUID_SIZE> const & driverUUID_      = {},
                                  std::array<uint8_t, VK_LUID_SIZE> const & deviceLUID_      = {},
                                  uint32_t                                  deviceNodeMask_  = {},
                                  VULKAN_HPP_NAMESPACE::Bool32              deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT
      : deviceUUID( deviceUUID_ )
      , driverUUID( driverUUID_ )
      , deviceLUID( deviceLUID_ )
      , deviceNodeMask( deviceNodeMask_ )
      , deviceLUIDValid( deviceLUIDValid_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceIDProperties( *reinterpret_cast<PhysicalDeviceIDProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties &
                            operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceIDProperties *>( this );
    }

    operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceIDProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceIDProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) &&
             ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) &&
             ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid );
    }

    bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                         sType      = StructureType::ePhysicalDeviceIdProperties;
    void *                                                      pNext      = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
    uint32_t                                                    deviceNodeMask  = {};
    VULKAN_HPP_NAMESPACE::Bool32                                deviceLUIDValid = {};
  };
  static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceIDProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
  {
    using Type = PhysicalDeviceIDProperties;
  };
  using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
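
  // Illustrative usage sketch (comment only, not part of the generated API): deviceUUID and
  // deviceLUID are typically used to match this physical device with the same adapter in another
  // API for external memory / semaphore interop. Assuming the default "vk" namespace and
  // dispatcher, and an otherDeviceUUID byte array obtained from that other API:
  //
  //   auto         chain   = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                                        vk::PhysicalDeviceIDProperties>();
  //   auto const & idProps = chain.get<vk::PhysicalDeviceIDProperties>();
  //   bool sameAdapter = ( memcmp( idProps.deviceUUID.data(), otherDeviceUUID, VK_UUID_SIZE ) == 0 );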

  struct PhysicalDeviceImageDrmFormatModifierInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(
      uint64_t                          drmFormatModifier_     = {},
      VULKAN_HPP_NAMESPACE::SharingMode sharingMode_           = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
      uint32_t                          queueFamilyIndexCount_ = {},
      const uint32_t *                  pQueueFamilyIndices_   = {} ) VULKAN_HPP_NOEXCEPT
      : drmFormatModifier( drmFormatModifier_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( queueFamilyIndexCount_ )
      , pQueueFamilyIndices( pQueueFamilyIndices_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(
      PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageDrmFormatModifierInfoEXT(
          *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PhysicalDeviceImageDrmFormatModifierInfoEXT(
      uint64_t                                                              drmFormatModifier_,
      VULKAN_HPP_NAMESPACE::SharingMode                                     sharingMode_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
      : drmFormatModifier( drmFormatModifier_ )
      , sharingMode( sharingMode_ )
      , queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) )
      , pQueueFamilyIndices( queueFamilyIndices_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT &
                            operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageDrmFormatModifierInfoEXT &
      operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT &
      setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
    {
      drmFormatModifier = drmFormatModifier_;
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT &
      setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
    {
      sharingMode = sharingMode_;
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT &
      setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = queueFamilyIndexCount_;
      return *this;
    }

    PhysicalDeviceImageDrmFormatModifierInfoEXT &
      setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      pQueueFamilyIndices = pQueueFamilyIndices_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
    {
      queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
      pQueueFamilyIndices   = queueFamilyIndices_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
    }

    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) &&
             ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) &&
             ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
    }

    bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
    const void *                        pNext             = {};
    uint64_t                            drmFormatModifier = {};
    VULKAN_HPP_NAMESPACE::SharingMode   sharingMode       = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
    uint32_t                            queueFamilyIndexCount = {};
    const uint32_t *                    pQueueFamilyIndices   = {};
  };
  static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) ==
                   sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceImageDrmFormatModifierInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
  {
    using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
  };
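
  // Illustrative usage sketch (comment only, not part of the generated API): before creating an
  // image with ImageTiling::eDrmFormatModifierEXT, a candidate modifier can be validated by
  // chaining this struct behind PhysicalDeviceImageFormatInfo2. Assuming the default "vk" namespace
  // and dispatcher, and a drmFormatModifier value obtained elsewhere (e.g. from
  // DrmFormatModifierPropertiesListEXT):
  //
  //   vk::PhysicalDeviceImageDrmFormatModifierInfoEXT modifierInfo( drmFormatModifier,
  //                                                                 vk::SharingMode::eExclusive );
  //   vk::PhysicalDeviceImageFormatInfo2              formatInfo( vk::Format::eR8G8B8A8Unorm,
  //                                                               vk::ImageType::e2D,
  //                                                               vk::ImageTiling::eDrmFormatModifierEXT,
  //                                                               vk::ImageUsageFlagBits::eSampled );
  //   formatInfo.pNext = &modifierInfo;
  //   vk::ImageFormatProperties2 formatProps = physicalDevice.getImageFormatProperties2( formatInfo );
  //   // the call reports an error if the format / modifier combination is not supported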

  struct PhysicalDeviceImageRobustnessFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {} ) VULKAN_HPP_NOEXCEPT
      : robustImageAccess( robustImageAccess_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT(
      PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageRobustnessFeaturesEXT( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageRobustnessFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceImageRobustnessFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeaturesEXT &
                            operator=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageRobustnessFeaturesEXT &
      operator=( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceImageRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceImageRobustnessFeaturesEXT &
      setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      robustImageAccess = robustImageAccess_;
      return *this;
    }

    operator VkPhysicalDeviceImageRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceImageRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageRobustnessFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess );
    }

    bool operator!=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT;
    void *                              pNext             = {};
    VULKAN_HPP_NAMESPACE::Bool32        robustImageAccess = {};
  };
  static_assert( sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceImageRobustnessFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceImageRobustnessFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT>
  {
    using Type = PhysicalDeviceImageRobustnessFeaturesEXT;
  };

  struct PhysicalDeviceImageViewImageFormatInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ =
                                                   VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT
      : imageViewType( imageViewType_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(
      PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImageViewImageFormatInfoEXT(
          *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT &
                            operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImageViewImageFormatInfoEXT &
      operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceImageViewImageFormatInfoEXT &
      setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewType = imageViewType_;
      return *this;
    }

    operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
    }

    operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType );
    }

    bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
    void *                              pNext         = {};
    VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
  };
  static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) ==
                   sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceImageViewImageFormatInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
  {
    using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
  };

  struct PhysicalDeviceImagelessFramebufferFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceImagelessFramebufferFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT
      : imagelessFramebuffer( imagelessFramebuffer_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(
      PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceImagelessFramebufferFeatures(
          *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures &
                            operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceImagelessFramebufferFeatures &
      operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceImagelessFramebufferFeatures &
      setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      imagelessFramebuffer = imagelessFramebuffer_;
      return *this;
    }

    operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
    }

    operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
    }

    bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        imagelessFramebuffer = {};
  };
  static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) ==
                   sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
  {
    using Type = PhysicalDeviceImagelessFramebufferFeatures;
  };
  using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
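  // Usage sketch (illustrative only; assumes a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice
  // and a populated VULKAN_HPP_NAMESPACE::DeviceCreateInfo named deviceCreateInfo):
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures imagelessFeatures;
  //   imagelessFeatures.setImagelessFramebuffer( VK_TRUE );
  //   deviceCreateInfo.setPNext( &imagelessFeatures );  // chain the feature struct into device creation
  //   VULKAN_HPP_NAMESPACE::Device device = physicalDevice.createDevice( deviceCreateInfo );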

  struct PhysicalDeviceIndexTypeUint8FeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT
      : indexTypeUint8( indexTypeUint8_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceIndexTypeUint8FeaturesEXT(
          *reinterpret_cast<PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT &
                            operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceIndexTypeUint8FeaturesEXT &
      operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceIndexTypeUint8FeaturesEXT &
      setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
    {
      indexTypeUint8 = indexTypeUint8_;
      return *this;
    }

    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 );
    }

    bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
    void *                              pNext          = {};
    VULKAN_HPP_NAMESPACE::Bool32        indexTypeUint8 = {};
  };
  static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
  {
    using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT;
  };
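  // Usage sketch (illustrative only; assumes a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice):
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2                 features2;
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT indexTypeUint8Features;
  //   features2.pNext = &indexTypeUint8Features;  // extend the query chain
  //   physicalDevice.getFeatures2( &features2 );
  //   bool supported = ( indexTypeUint8Features.indexTypeUint8 == VK_TRUE );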

  struct PhysicalDeviceInheritedViewportScissorFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {} ) VULKAN_HPP_NOEXCEPT
      : inheritedViewportScissor2D( inheritedViewportScissor2D_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV(
      PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceInheritedViewportScissorFeaturesNV(
          *reinterpret_cast<PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV &
                            operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInheritedViewportScissorFeaturesNV &
      operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceInheritedViewportScissorFeaturesNV &
      setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT
    {
      inheritedViewportScissor2D = inheritedViewportScissor2D_;
      return *this;
    }

    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceInheritedViewportScissorFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D );
    }

    bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        inheritedViewportScissor2D = {};
  };
  static_assert( sizeof( PhysicalDeviceInheritedViewportScissorFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceInheritedViewportScissorFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV>
  {
    using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV;
  };

  struct PhysicalDeviceInlineUniformBlockFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_                                 = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT
      : inlineUniformBlock( inlineUniformBlock_ )
      , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT(
      PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceInlineUniformBlockFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeaturesEXT &
                            operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInlineUniformBlockFeaturesEXT &
      operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceInlineUniformBlockFeaturesEXT &
      setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
    {
      inlineUniformBlock = inlineUniformBlock_;
      return *this;
    }

    PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
      return *this;
    }

    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) &&
             ( descriptorBindingInlineUniformBlockUpdateAfterBind ==
               rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
    }

    bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        inlineUniformBlock                                 = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingInlineUniformBlockUpdateAfterBind = {};
  };
  static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT>
  {
    using Type = PhysicalDeviceInlineUniformBlockFeaturesEXT;
  };

  struct PhysicalDeviceInlineUniformBlockPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT(
      uint32_t maxInlineUniformBlockSize_                               = {},
      uint32_t maxPerStageDescriptorInlineUniformBlocks_                = {},
      uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
      uint32_t maxDescriptorSetInlineUniformBlocks_                     = {},
      uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_      = {} ) VULKAN_HPP_NOEXCEPT
      : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
      , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
      , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks(
          maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
      , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
      , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT(
      PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceInlineUniformBlockPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceInlineUniformBlockPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockPropertiesEXT &
                            operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceInlineUniformBlockPropertiesEXT &
      operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) &&
             ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) &&
             ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ==
               rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) &&
             ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) &&
             ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks ==
               rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
    }

    bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
    void *                              pNext = {};
    uint32_t                            maxInlineUniformBlockSize                               = {};
    uint32_t                            maxPerStageDescriptorInlineUniformBlocks                = {};
    uint32_t                            maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
    uint32_t                            maxDescriptorSetInlineUniformBlocks                     = {};
    uint32_t                            maxDescriptorSetUpdateAfterBindInlineUniformBlocks      = {};
  };
  static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT>
  {
    using Type = PhysicalDeviceInlineUniformBlockPropertiesEXT;
  };
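  // Usage sketch (illustrative only; assumes a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice):
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2                     properties2;
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT inlineUniformBlockProperties;
  //   properties2.pNext = &inlineUniformBlockProperties;  // extend the query chain
  //   physicalDevice.getProperties2( &properties2 );
  //   uint32_t maxInlineUniformBlockSize = inlineUniformBlockProperties.maxInlineUniformBlockSize;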

  struct PhysicalDeviceLineRasterizationFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_         = {},
      VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_           = {},
      VULKAN_HPP_NAMESPACE::Bool32 smoothLines_              = {},
      VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_   = {},
      VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_      = {} ) VULKAN_HPP_NOEXCEPT
      : rectangularLines( rectangularLines_ )
      , bresenhamLines( bresenhamLines_ )
      , smoothLines( smoothLines_ )
      , stippledRectangularLines( stippledRectangularLines_ )
      , stippledBresenhamLines( stippledBresenhamLines_ )
      , stippledSmoothLines( stippledSmoothLines_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(
      PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLineRasterizationFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT &
                            operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLineRasterizationFeaturesEXT &
      operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceLineRasterizationFeaturesEXT &
      setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
    {
      rectangularLines = rectangularLines_;
      return *this;
    }

    PhysicalDeviceLineRasterizationFeaturesEXT &
      setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
    {
      bresenhamLines = bresenhamLines_;
      return *this;
    }

    PhysicalDeviceLineRasterizationFeaturesEXT &
      setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
    {
      smoothLines = smoothLines_;
      return *this;
    }

    PhysicalDeviceLineRasterizationFeaturesEXT &
      setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledRectangularLines = stippledRectangularLines_;
      return *this;
    }

    PhysicalDeviceLineRasterizationFeaturesEXT &
      setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledBresenhamLines = stippledBresenhamLines_;
      return *this;
    }

    PhysicalDeviceLineRasterizationFeaturesEXT &
      setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledSmoothLines = stippledSmoothLines_;
      return *this;
    }

    operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) &&
             ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) &&
             ( stippledRectangularLines == rhs.stippledRectangularLines ) &&
             ( stippledBresenhamLines == rhs.stippledBresenhamLines ) &&
             ( stippledSmoothLines == rhs.stippledSmoothLines );
    }

    bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
    void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::Bool32        rectangularLines = {};
    VULKAN_HPP_NAMESPACE::Bool32        bresenhamLines   = {};
    VULKAN_HPP_NAMESPACE::Bool32        smoothLines      = {};
    VULKAN_HPP_NAMESPACE::Bool32        stippledRectangularLines = {};
    VULKAN_HPP_NAMESPACE::Bool32        stippledBresenhamLines   = {};
    VULKAN_HPP_NAMESPACE::Bool32        stippledSmoothLines      = {};
  };
  static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT>
  {
    using Type = PhysicalDeviceLineRasterizationFeaturesEXT;
  };
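  // Usage sketch (illustrative only): the setters return *this, so a feature struct can be populated fluently
  // before being chained into a VULKAN_HPP_NAMESPACE::DeviceCreateInfo pNext chain:
  //   auto lineRasterizationFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT()
  //                                      .setBresenhamLines( VK_TRUE )
  //                                      .setStippledBresenhamLines( VK_TRUE );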

  struct PhysicalDeviceLineRasterizationPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT
      : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(
      PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceLineRasterizationPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationPropertiesEXT &
                            operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceLineRasterizationPropertiesEXT &
      operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
    }

    bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
    void *                              pNext = {};
    uint32_t                            lineSubPixelPrecisionBits = {};
  };
  static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
  {
    using Type = PhysicalDeviceLineRasterizationPropertiesEXT;
  };

  struct PhysicalDeviceMaintenance3Properties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceMaintenance3Properties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(
      uint32_t                         maxPerSetDescriptors_    = {},
      VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxPerSetDescriptors( maxPerSetDescriptors_ )
      , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMaintenance3Properties( *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance3Properties &
                            operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMaintenance3Properties &
      operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties *>( this );
    }

    operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default;
#else
    bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
             ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
    }

    bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                   = StructureType::ePhysicalDeviceMaintenance3Properties;
    void *                              pNext                   = {};
    uint32_t                            maxPerSetDescriptors    = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    maxMemoryAllocationSize = {};
  };
  static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
  {
    using Type = PhysicalDeviceMaintenance3Properties;
  };
  using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
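  // Usage sketch (illustrative only; requires enhanced mode and assumes a valid
  // VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice): getProperties2 can fill a StructureChain directly:
  //   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>();
  //   VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize =
  //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>().maxMemoryAllocationSize;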

  struct PhysicalDeviceMemoryBudgetPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(
      std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapBudget_ = {},
      std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const & heapUsage_  = {} ) VULKAN_HPP_NOEXCEPT
      : heapBudget( heapBudget_ )
      , heapUsage( heapUsage_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(
      PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMemoryBudgetPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT &
                            operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMemoryBudgetPropertiesEXT &
      operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) &&
             ( heapUsage == rhs.heapUsage );
    }

    bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage  = {};
  };
  static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
  {
    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
  };
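  // Usage sketch (illustrative only; assumes a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice):
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2         memoryProperties2;
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT memoryBudget;
  //   memoryProperties2.pNext = &memoryBudget;  // extend the query chain
  //   physicalDevice.getMemoryProperties2( &memoryProperties2 );
  //   // heapBudget / heapUsage are indexed by memory heap; only the first
  //   // memoryProperties2.memoryProperties.memoryHeapCount entries are meaningful.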

  struct PhysicalDeviceMemoryPriorityFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT
      : memoryPriority( memoryPriority_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMemoryPriorityFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT &
                            operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMemoryPriorityFeaturesEXT &
      operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceMemoryPriorityFeaturesEXT &
      setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
    {
      memoryPriority = memoryPriority_;
      return *this;
    }

    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority );
    }

    bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
    void *                              pNext          = {};
    VULKAN_HPP_NAMESPACE::Bool32        memoryPriority = {};
  };
  static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
  {
    using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
  };

  struct PhysicalDeviceMeshShaderFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceMeshShaderFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
                                          VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT
      : taskShader( taskShader_ )
      , meshShader( meshShader_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV &
                            operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMeshShaderFeaturesNV &
      operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
    {
      taskShader = taskShader_;
      return *this;
    }

    PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
    {
      meshShader = meshShader_;
      return *this;
    }

    operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) &&
             ( meshShader == rhs.meshShader );
    }

    bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
    void *                              pNext      = {};
    VULKAN_HPP_NAMESPACE::Bool32        taskShader = {};
    VULKAN_HPP_NAMESPACE::Bool32        meshShader = {};
  };
  static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
  {
    using Type = PhysicalDeviceMeshShaderFeaturesNV;
  };
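  // Usage sketch (illustrative only; assumes a valid VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice
  // and a populated VULKAN_HPP_NAMESPACE::DeviceCreateInfo named deviceCreateInfo): feature structs can also be
  // linked through a StructureChain at device creation:
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV meshShaderFeatures;
  //   meshShaderFeatures.setTaskShader( VK_TRUE ).setMeshShader( VK_TRUE );
  //   VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::DeviceCreateInfo,
  //                                        VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>
  //     chain( deviceCreateInfo, meshShaderFeatures );
  //   VULKAN_HPP_NAMESPACE::Device device =
  //     physicalDevice.createDevice( chain.get<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>() );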

  struct PhysicalDeviceMeshShaderPropertiesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceMeshShaderPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceMeshShaderPropertiesNV( uint32_t                        maxDrawMeshTasksCount_          = {},
                                            uint32_t                        maxTaskWorkGroupInvocations_    = {},
                                            std::array<uint32_t, 3> const & maxTaskWorkGroupSize_           = {},
                                            uint32_t                        maxTaskTotalMemorySize_         = {},
                                            uint32_t                        maxTaskOutputCount_             = {},
                                            uint32_t                        maxMeshWorkGroupInvocations_    = {},
                                            std::array<uint32_t, 3> const & maxMeshWorkGroupSize_           = {},
                                            uint32_t                        maxMeshTotalMemorySize_         = {},
                                            uint32_t                        maxMeshOutputVertices_          = {},
                                            uint32_t                        maxMeshOutputPrimitives_        = {},
                                            uint32_t                        maxMeshMultiviewViewCount_      = {},
                                            uint32_t                        meshOutputPerVertexGranularity_ = {},
                                            uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
      , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
      , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ )
      , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
      , maxTaskOutputCount( maxTaskOutputCount_ )
      , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
      , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ )
      , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ )
      , maxMeshOutputVertices( maxMeshOutputVertices_ )
      , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
      , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
      , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
      , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV &
                            operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMeshShaderPropertiesNV &
      operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) &&
             ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) &&
             ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) &&
             ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) &&
             ( maxTaskOutputCount == rhs.maxTaskOutputCount ) &&
             ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) &&
             ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) &&
             ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) &&
             ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) &&
             ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) &&
             ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) &&
             ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) &&
             ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
    }

    bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
    void *                                            pNext = {};
    uint32_t                                          maxDrawMeshTasksCount             = {};
    uint32_t                                          maxTaskWorkGroupInvocations       = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize              = {};
    uint32_t                                          maxTaskTotalMemorySize            = {};
    uint32_t                                          maxTaskOutputCount                = {};
    uint32_t                                          maxMeshWorkGroupInvocations       = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize              = {};
    uint32_t                                          maxMeshTotalMemorySize            = {};
    uint32_t                                          maxMeshOutputVertices             = {};
    uint32_t                                          maxMeshOutputPrimitives           = {};
    uint32_t                                          maxMeshMultiviewViewCount         = {};
    uint32_t                                          meshOutputPerVertexGranularity    = {};
    uint32_t                                          meshOutputPerPrimitiveGranularity = {};
  };
  static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
  {
    using Type = PhysicalDeviceMeshShaderPropertiesNV;
  };

  struct PhysicalDeviceMultiviewFeatures
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 multiview_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_     = {},
      VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT
      : multiview( multiview_ )
      , multiviewGeometryShader( multiviewGeometryShader_ )
      , multiviewTessellationShader( multiviewTessellationShader_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewFeatures( *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures &
                            operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
    {
      multiview = multiview_;
      return *this;
    }

    PhysicalDeviceMultiviewFeatures &
      setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewGeometryShader = multiviewGeometryShader_;
      return *this;
    }

    PhysicalDeviceMultiviewFeatures &
      setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewTessellationShader = multiviewTessellationShader_;
      return *this;
    }

    operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures *>( this );
    }

    operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) &&
             ( multiviewGeometryShader == rhs.multiviewGeometryShader ) &&
             ( multiviewTessellationShader == rhs.multiviewTessellationShader );
    }

    bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                       = StructureType::ePhysicalDeviceMultiviewFeatures;
    void *                              pNext                       = {};
    VULKAN_HPP_NAMESPACE::Bool32        multiview                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        multiviewGeometryShader     = {};
    VULKAN_HPP_NAMESPACE::Bool32        multiviewTessellationShader = {};
  };
  static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
  {
    using Type = PhysicalDeviceMultiviewFeatures;
  };
  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
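  // Usage sketch (illustrative only; requires enhanced mode and assumes a valid
  // VULKAN_HPP_NAMESPACE::PhysicalDevice named physicalDevice): the templated getFeatures2 fills a StructureChain:
  //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>();
  //   bool multiviewSupported =
  //     ( chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>().multiview == VK_TRUE );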

  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
      VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT
      : perViewPositionAllComponents( perViewPositionAllComponents_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
      PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
      VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(
          *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &
                            operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &
      operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this );
    }

    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
    }

    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        perViewPositionAllComponents = {};
  };
  static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) ==
                   sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
  {
    using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
  };

  struct PhysicalDeviceMultiviewProperties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceMultiviewProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_     = {},
                                         uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxMultiviewViewCount( maxMultiviewViewCount_ )
      , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewProperties( *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewProperties &
                            operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties *>( this );
    }

    operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) &&
             ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
    }

    bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                     = StructureType::ePhysicalDeviceMultiviewProperties;
    void *                              pNext                     = {};
    uint32_t                            maxMultiviewViewCount     = {};
    uint32_t                            maxMultiviewInstanceIndex = {};
  };
  static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
  {
    using Type = PhysicalDeviceMultiviewProperties;
  };
  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
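
  // Usage sketch (illustrative only): the multiview limits above are typically queried by chaining this struct
  // onto PhysicalDeviceProperties2, e.g. with the enhanced-mode StructureChain API. Assumes a valid
  // vk::PhysicalDevice named physicalDevice and the default namespace alias vk.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceMultiviewProperties>();
  //   uint32_t maxViews = chain.get<vk::PhysicalDeviceMultiviewProperties>().maxMultiviewViewCount;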

  struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(
      VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {} ) VULKAN_HPP_NOEXCEPT
      : mutableDescriptorType( mutableDescriptorType_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(
      PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(
          *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &
                            operator=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &
      operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs );
      return *this;
    }

    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE &
      setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableDescriptorType = mutableDescriptorType_;
      return *this;
    }

    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE *>( this );
    }

    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & ) const = default;
#else
    bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType );
    }

    bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        mutableDescriptorType = {};
  };
  static_assert( sizeof( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE ) ==
                   sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE>
  {
    using Type = PhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
  };
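
  // Usage sketch (illustrative only): like other feature structs, this one is enabled by chaining it into
  // DeviceCreateInfo::pNext before device creation. Assumes an already filled vk::DeviceCreateInfo named
  // deviceCreateInfo, that VK_VALVE_mutable_descriptor_type is listed in its enabled extensions, and the
  // default namespace alias vk.
  //
  //   vk::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE mutableDescriptorTypeFeatures( VK_TRUE );
  //   deviceCreateInfo.setPNext( &mutableDescriptorTypeFeatures );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );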

  struct PhysicalDevicePCIBusInfoPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_   = {},
                                                                uint32_t pciBus_      = {},
                                                                uint32_t pciDevice_   = {},
                                                                uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT
      : pciDomain( pciDomain_ )
      , pciBus( pciBus_ )
      , pciDevice( pciDevice_ )
      , pciFunction( pciFunction_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePCIBusInfoPropertiesEXT(
          *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePCIBusInfoPropertiesEXT &
                            operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePCIBusInfoPropertiesEXT &
      operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
    }

    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) &&
             ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && ( pciFunction == rhs.pciFunction );
    }

    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
    void *                              pNext       = {};
    uint32_t                            pciDomain   = {};
    uint32_t                            pciBus      = {};
    uint32_t                            pciDevice   = {};
    uint32_t                            pciFunction = {};
  };
  static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
  {
    using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
  };
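
  // Usage sketch (illustrative only): the VK_EXT_pci_bus_info properties can also be queried without
  // StructureChain by wiring pNext by hand and using the pointer overload of getProperties2. Assumes the
  // extension is supported and the default namespace alias vk.
  //
  //   vk::PhysicalDevicePCIBusInfoPropertiesEXT pciInfo;
  //   vk::PhysicalDeviceProperties2             properties2;
  //   properties2.pNext = &pciInfo;
  //   physicalDevice.getProperties2( &properties2 );
  //   // pciInfo.pciDomain / pciBus / pciDevice / pciFunction now hold the device's PCI address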

  struct PhysicalDevicePerformanceQueryFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_         = {},
      VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {} ) VULKAN_HPP_NOEXCEPT
      : performanceCounterQueryPools( performanceCounterQueryPools_ )
      , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(
      PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceQueryFeaturesKHR(
          *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR &
                            operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePerformanceQueryFeaturesKHR &
      operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevicePerformanceQueryFeaturesKHR &
      setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCounterQueryPools = performanceCounterQueryPools_;
      return *this;
    }

    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools(
      VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
      return *this;
    }

    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) &&
             ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
    }

    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        performanceCounterQueryPools         = {};
    VULKAN_HPP_NAMESPACE::Bool32        performanceCounterMultipleQueryPools = {};
  };
  static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) ==
                   sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
  };
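
  // Usage sketch (illustrative only): support for VK_KHR_performance_query is usually detected by chaining this
  // struct onto PhysicalDeviceFeatures2. Assumes the default namespace alias vk.
  //
  //   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                               vk::PhysicalDevicePerformanceQueryFeaturesKHR>();
  //   auto const & perfQuery = features.get<vk::PhysicalDevicePerformanceQueryFeaturesKHR>();
  //   bool canUseQueryPools         = perfQuery.performanceCounterQueryPools;
  //   bool canUseMultipleQueryPools = perfQuery.performanceCounterMultipleQueryPools;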

  struct PhysicalDevicePerformanceQueryPropertiesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {} ) VULKAN_HPP_NOEXCEPT
      : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(
      PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceQueryPropertiesKHR(
          *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryPropertiesKHR &
                            operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePerformanceQueryPropertiesKHR &
      operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
    }

    bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        allowCommandBufferQueryCopies = {};
  };
  static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) ==
                   sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryPropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
  };

  struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {} ) VULKAN_HPP_NOEXCEPT
      : pipelineCreationCacheControl( pipelineCreationCacheControl_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT(
      PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePipelineCreationCacheControlFeaturesEXT(
      VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineCreationCacheControlFeaturesEXT(
          *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeaturesEXT &
                            operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePipelineCreationCacheControlFeaturesEXT &
      operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevicePipelineCreationCacheControlFeaturesEXT &
      setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCreationCacheControl = pipelineCreationCacheControl_;
      return *this;
    }

    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
    }

    bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        pipelineCreationCacheControl = {};
  };
  static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) ==
                   sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePipelineCreationCacheControlFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT>
  {
    using Type = PhysicalDevicePipelineCreationCacheControlFeaturesEXT;
  };

  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : pipelineExecutableInfo( pipelineExecutableInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
      PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
      VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(
          *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
                            operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
      operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR &
      setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineExecutableInfo = pipelineExecutableInfo_;
      return *this;
    }

    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
    }

    bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        pipelineExecutableInfo = {};
  };
  static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) ==
                   sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
  {
    using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
  };
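
  // Usage sketch (illustrative only): the setters above allow fluent construction, and StructureChain links the
  // feature struct into DeviceCreateInfo::pNext automatically. Assumes an already filled vk::DeviceCreateInfo
  // named deviceCreateInfo, that VK_KHR_pipeline_executable_properties is enabled, and the default namespace
  // alias vk.
  //
  //   vk::StructureChain<vk::DeviceCreateInfo, vk::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR> chain(
  //     deviceCreateInfo,
  //     vk::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR().setPipelineExecutableInfo( VK_TRUE ) );
  //   vk::Device device = physicalDevice.createDevice( chain.get<vk::DeviceCreateInfo>() );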

  struct PhysicalDevicePointClippingProperties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePointClippingProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(
      VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ =
        VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT
      : pointClippingBehavior( pointClippingBehavior_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePointClippingProperties(
          *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePointClippingProperties &
                            operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePointClippingProperties &
      operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties *>( this );
    }

    operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePointClippingProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default;
#else
    bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior );
    }

    bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType = StructureType::ePhysicalDevicePointClippingProperties;
    void *                                      pNext = {};
    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior =
      VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
  };
  static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePointClippingProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
  {
    using Type = PhysicalDevicePointClippingProperties;
  };
  using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
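
  // Usage sketch (illustrative only): the conversion operators above let the wrapper be handed to the plain C
  // API; the wrapper already carries the correct sType. Assumes a Vulkan 1.1 instance and the default namespace
  // alias vk.
  //
  //   vk::PhysicalDevicePointClippingProperties pointClipping;
  //   VkPhysicalDeviceProperties2               properties2 = {};
  //   properties2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  //   properties2.pNext = &static_cast<VkPhysicalDevicePointClippingProperties &>( pointClipping );
  //   vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( physicalDevice ), &properties2 );
  //   bool userClipPlanesOnly =
  //     ( pointClipping.pointClippingBehavior == vk::PointClippingBehavior::eUserClipPlanesOnly );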

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct PhysicalDevicePortabilitySubsetFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_         = {},
      VULKAN_HPP_NAMESPACE::Bool32 events_                                 = {},
      VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_        = {},
      VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_                 = {},
      VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_              = {},
      VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_                          = {},
      VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_                      = {},
      VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_                 = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 triangleFans_                           = {},
      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_      = {} ) VULKAN_HPP_NOEXCEPT
      : constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ )
      , events( events_ )
      , imageViewFormatReinterpretation( imageViewFormatReinterpretation_ )
      , imageViewFormatSwizzle( imageViewFormatSwizzle_ )
      , imageView2DOn3DImage( imageView2DOn3DImage_ )
      , multisampleArrayImage( multisampleArrayImage_ )
      , mutableComparisonSamplers( mutableComparisonSamplers_ )
      , pointPolygons( pointPolygons_ )
      , samplerMipLodBias( samplerMipLodBias_ )
      , separateStencilMaskRef( separateStencilMaskRef_ )
      , shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ )
      , tessellationIsolines( tessellationIsolines_ )
      , tessellationPointMode( tessellationPointMode_ )
      , triangleFans( triangleFans_ )
      , vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(
      PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePortabilitySubsetFeaturesKHR(
          *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR &
                            operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors(
      VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
    {
      constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
    {
      events = events_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation(
      VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewFormatSwizzle = imageViewFormatSwizzle_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView2DOn3DImage = imageView2DOn3DImage_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
    {
      multisampleArrayImage = multisampleArrayImage_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableComparisonSamplers = mutableComparisonSamplers_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
    {
      pointPolygons = pointPolygons_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerMipLodBias = samplerMipLodBias_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
    {
      separateStencilMaskRef = separateStencilMaskRef_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions(
      VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
    {
      tessellationIsolines = tessellationIsolines_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
    {
      tessellationPointMode = tessellationPointMode_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR &
      setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
    {
      triangleFans = triangleFans_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride(
      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
      return *this;
    }

    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
    }

    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default;
#  else
    bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) && ( events == rhs.events ) &&
             ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) &&
             ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) &&
             ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) &&
             ( multisampleArrayImage == rhs.multisampleArrayImage ) &&
             ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) && ( pointPolygons == rhs.pointPolygons ) &&
             ( samplerMipLodBias == rhs.samplerMipLodBias ) &&
             ( separateStencilMaskRef == rhs.separateStencilMaskRef ) &&
             ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) &&
             ( tessellationIsolines == rhs.tessellationIsolines ) &&
             ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) &&
             ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
    }

    bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        constantAlphaColorBlendFactors         = {};
    VULKAN_HPP_NAMESPACE::Bool32        events                                 = {};
    VULKAN_HPP_NAMESPACE::Bool32        imageViewFormatReinterpretation        = {};
    VULKAN_HPP_NAMESPACE::Bool32        imageViewFormatSwizzle                 = {};
    VULKAN_HPP_NAMESPACE::Bool32        imageView2DOn3DImage                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        multisampleArrayImage                  = {};
    VULKAN_HPP_NAMESPACE::Bool32        mutableComparisonSamplers              = {};
    VULKAN_HPP_NAMESPACE::Bool32        pointPolygons                          = {};
    VULKAN_HPP_NAMESPACE::Bool32        samplerMipLodBias                      = {};
    VULKAN_HPP_NAMESPACE::Bool32        separateStencilMaskRef                 = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSampleRateInterpolationFunctions = {};
    VULKAN_HPP_NAMESPACE::Bool32        tessellationIsolines                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        tessellationPointMode                  = {};
    VULKAN_HPP_NAMESPACE::Bool32        triangleFans                           = {};
    VULKAN_HPP_NAMESPACE::Bool32        vertexAttributeAccessBeyondStride      = {};
  };
  static_assert( sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) ==
                   sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
  {
    using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct PhysicalDevicePortabilitySubsetPropertiesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(
      uint32_t minVertexInputBindingStrideAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
      : minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(
      PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePortabilitySubsetPropertiesKHR(
          *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR &
                            operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePortabilitySubsetPropertiesKHR &
      operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevicePortabilitySubsetPropertiesKHR &
      setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
    {
      minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_;
      return *this;
    }

    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default;
#  else
    bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
    }

    bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
    void *                              pNext = {};
    uint32_t                            minVertexInputBindingStrideAlignment = {};
  };
  static_assert( sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) ==
                   sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
  {
    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
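
  // Usage sketch (illustrative only): when a device advertises the provisional VK_KHR_portability_subset
  // extension (only visible with VK_ENABLE_BETA_EXTENSIONS defined), functionality whose feature bit is not set
  // must be avoided. Assumes an already declared vk::PipelineInputAssemblyStateCreateInfo named inputAssembly
  // and the default namespace alias vk.
  //
  //   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                               vk::PhysicalDevicePortabilitySubsetFeaturesKHR>();
  //   if ( features.get<vk::PhysicalDevicePortabilitySubsetFeaturesKHR>().triangleFans )
  //   {
  //     inputAssembly.setTopology( vk::PrimitiveTopology::eTriangleFan );
  //   }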

  struct PhysicalDevicePrivateDataFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePrivateDataFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePrivateDataFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {} ) VULKAN_HPP_NOEXCEPT
      : privateData( privateData_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT( PhysicalDevicePrivateDataFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePrivateDataFeaturesEXT( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePrivateDataFeaturesEXT( *reinterpret_cast<PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeaturesEXT &
                            operator=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePrivateDataFeaturesEXT &
      operator=( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDevicePrivateDataFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDevicePrivateDataFeaturesEXT &
      setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
    {
      privateData = privateData_;
      return *this;
    }

    operator VkPhysicalDevicePrivateDataFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeaturesEXT *>( this );
    }

    operator VkPhysicalDevicePrivateDataFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePrivateDataFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData );
    }

    bool operator!=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType       = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
    void *                              pNext       = {};
    VULKAN_HPP_NAMESPACE::Bool32        privateData = {};
  };
  static_assert( sizeof( PhysicalDevicePrivateDataFeaturesEXT ) == sizeof( VkPhysicalDevicePrivateDataFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePrivateDataFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeaturesEXT>
  {
    using Type = PhysicalDevicePrivateDataFeaturesEXT;
  };

  struct PhysicalDeviceProtectedMemoryFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceProtectedMemoryFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT
      : protectedMemory( protectedMemory_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProtectedMemoryFeatures(
          *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures &
                            operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProtectedMemoryFeatures &
      operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceProtectedMemoryFeatures &
      setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      protectedMemory = protectedMemory_;
      return *this;
    }

    operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory );
    }

    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
    void *                              pNext           = {};
    VULKAN_HPP_NAMESPACE::Bool32        protectedMemory = {};
  };
  static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
  {
    using Type = PhysicalDeviceProtectedMemoryFeatures;
  };
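
  // Usage sketch (illustrative only): if protectedMemory is supported, a protected-capable queue can be
  // requested at device creation. Assumes an already filled vk::DeviceQueueCreateInfo named queueCreateInfo and
  // the default namespace alias vk.
  //
  //   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                               vk::PhysicalDeviceProtectedMemoryFeatures>();
  //   if ( features.get<vk::PhysicalDeviceProtectedMemoryFeatures>().protectedMemory )
  //   {
  //     queueCreateInfo.setFlags( vk::DeviceQueueCreateFlagBits::eProtected );
  //   }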

  struct PhysicalDeviceProtectedMemoryProperties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceProtectedMemoryProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT
      : protectedNoFault( protectedNoFault_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProtectedMemoryProperties(
          *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryProperties &
                            operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProtectedMemoryProperties &
      operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this );
    }

    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault );
    }

    bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceProtectedMemoryProperties;
    void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::Bool32        protectedNoFault = {};
  };
  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) ==
                   sizeof( VkPhysicalDeviceProtectedMemoryProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
  {
    using Type = PhysicalDeviceProtectedMemoryProperties;
  };

  struct PhysicalDeviceProvokingVertexFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_                       = {},
      VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT
      : provokingVertexLast( provokingVertexLast_ )
      , transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT(
      PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProvokingVertexFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT &
                            operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProvokingVertexFeaturesEXT &
      operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceProvokingVertexFeaturesEXT &
      setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT
    {
      provokingVertexLast = provokingVertexLast_;
      return *this;
    }

    PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex(
      VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT
    {
      transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_;
      return *this;
    }

    operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) &&
             ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex );
    }

    bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT;
    void *                              pNext               = {};
    VULKAN_HPP_NAMESPACE::Bool32        provokingVertexLast = {};
    VULKAN_HPP_NAMESPACE::Bool32        transformFeedbackPreservesProvokingVertex = {};
  };
  static_assert( sizeof( PhysicalDeviceProvokingVertexFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProvokingVertexFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT>
  {
    using Type = PhysicalDeviceProvokingVertexFeaturesEXT;
  };
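
  // Illustrative sketch (not generated code): feature structs such as PhysicalDeviceProvokingVertexFeaturesEXT
  // above are typically queried by chaining them behind PhysicalDeviceFeatures2, e.g. via a StructureChain in
  // enhanced mode. The name physicalDevice and the availability of VK_EXT_provoking_vertex are assumptions of
  // this example:
  //
  //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
  //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>();
  //   VULKAN_HPP_NAMESPACE::Bool32 lastVertex =
  //     chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>().provokingVertexLast;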

  struct PhysicalDeviceProvokingVertexPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_                       = {},
      VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {} ) VULKAN_HPP_NOEXCEPT
      : provokingVertexModePerPipeline( provokingVertexModePerPipeline_ )
      , transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT(
      PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProvokingVertexPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexPropertiesEXT &
                            operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceProvokingVertexPropertiesEXT &
      operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProvokingVertexPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) &&
             ( transformFeedbackPreservesTriangleFanProvokingVertex ==
               rhs.transformFeedbackPreservesTriangleFanProvokingVertex );
    }

    bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        provokingVertexModePerPipeline                       = {};
    VULKAN_HPP_NAMESPACE::Bool32        transformFeedbackPreservesTriangleFanProvokingVertex = {};
  };
  static_assert( sizeof( PhysicalDeviceProvokingVertexPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProvokingVertexPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT>
  {
    using Type = PhysicalDeviceProvokingVertexPropertiesEXT;
  };
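
  // Illustrative sketch (not generated code): unlike the feature structs, property structs such as
  // PhysicalDeviceProvokingVertexPropertiesEXT above carry no setters; they are pure outputs filled in by the
  // implementation. Assuming a valid physicalDevice and support for extended property queries:
  //
  //   auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
  //                                              VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>();
  //   auto const & provokingProps = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>();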

  struct PhysicalDevicePushDescriptorPropertiesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxPushDescriptors( maxPushDescriptors_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(
      PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePushDescriptorPropertiesKHR(
          *reinterpret_cast<PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushDescriptorPropertiesKHR &
                            operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDevicePushDescriptorPropertiesKHR &
      operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
    }

    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors );
    }

    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType              = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
    void *                              pNext              = {};
    uint32_t                            maxPushDescriptors = {};
  };
  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) ==
                   sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
  {
    using Type = PhysicalDevicePushDescriptorPropertiesKHR;
  };
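
  // Illustrative note (not generated code): the static_asserts above guarantee that each wrapper is
  // layout-compatible with its C counterpart, which is what makes the reinterpret_casts in the conversion
  // operators valid. A wrapper can therefore be handed to C code directly, e.g.:
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR cppProps;
  //   VkPhysicalDevicePushDescriptorPropertiesKHR const & cProps = cppProps;  // implicit conversion operator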

  struct PhysicalDeviceRayQueryFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceRayQueryFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {} ) VULKAN_HPP_NOEXCEPT
      : rayQuery( rayQuery_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR &
                            operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
    {
      rayQuery = rayQuery_;
      return *this;
    }

    operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery );
    }

    bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
    void *                              pNext    = {};
    VULKAN_HPP_NAMESPACE::Bool32        rayQuery = {};
  };
  static_assert( sizeof( PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceRayQueryFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayQueryFeaturesKHR>
  {
    using Type = PhysicalDeviceRayQueryFeaturesKHR;
  };
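
  // Illustrative sketch (not generated code): the setters on feature structs return *this, so a struct like
  // PhysicalDeviceRayQueryFeaturesKHR above can be configured fluently and hooked into the pNext chain of a
  // DeviceCreateInfo to enable the feature at device creation. The queue and extension setup of createInfo
  // are assumptions of this example and omitted:
  //
  //   auto rayQueryFeatures = VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR().setRayQuery( VK_TRUE );
  //   VULKAN_HPP_NAMESPACE::DeviceCreateInfo createInfo;  // queues, extensions, etc. omitted
  //   createInfo.setPNext( &rayQueryFeatures );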

  struct PhysicalDeviceRayTracingPipelineFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_                                    = {},
      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_                          = {} ) VULKAN_HPP_NOEXCEPT
      : rayTracingPipeline( rayTracingPipeline_ )
      , rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ )
      , rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ )
      , rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ )
      , rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(
      PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPipelineFeaturesKHR(
          *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR &
                            operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPipelineFeaturesKHR &
      operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceRayTracingPipelineFeaturesKHR &
      setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipeline = rayTracingPipeline_;
      return *this;
    }

    PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplay(
      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_;
      return *this;
    }

    PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed(
      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_;
      return *this;
    }

    PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect(
      VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_;
      return *this;
    }

    PhysicalDeviceRayTracingPipelineFeaturesKHR &
      setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
    {
      rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_;
      return *this;
    }

    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) &&
             ( rayTracingPipelineShaderGroupHandleCaptureReplay ==
               rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) &&
             ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed ==
               rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) &&
             ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) &&
             ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling );
    }

    bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        rayTracingPipeline                                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        rayTracingPipelineShaderGroupHandleCaptureReplay      = {};
    VULKAN_HPP_NAMESPACE::Bool32        rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {};
    VULKAN_HPP_NAMESPACE::Bool32        rayTracingPipelineTraceRaysIndirect                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        rayTraversalPrimitiveCulling                          = {};
  };
  static_assert( sizeof( PhysicalDeviceRayTracingPipelineFeaturesKHR ) ==
                   sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPipelineFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR>
  {
    using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR;
  };
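
  // Illustrative sketch (not generated code): when several feature structs have to be enabled together (ray
  // tracing pipelines are usually combined with ray query and acceleration structure features, the latter
  // defined elsewhere in this header), a StructureChain keeps the pNext links consistent. The contents of
  // createInfo and the name physicalDevice are assumptions of this example:
  //
  //   VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::DeviceCreateInfo,
  //                                        VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR,
  //                                        VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>
  //     chain( createInfo,
  //            VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR().setRayTracingPipeline( VK_TRUE ),
  //            VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR().setRayQuery( VK_TRUE ) );
  //   auto device = physicalDevice.createDevice( chain.get<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>() );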

  struct PhysicalDeviceRayTracingPipelinePropertiesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_              = {},
                                                     uint32_t maxRayRecursionDepth_               = {},
                                                     uint32_t maxShaderGroupStride_               = {},
                                                     uint32_t shaderGroupBaseAlignment_           = {},
                                                     uint32_t shaderGroupHandleCaptureReplaySize_ = {},
                                                     uint32_t maxRayDispatchInvocationCount_      = {},
                                                     uint32_t shaderGroupHandleAlignment_         = {},
                                                     uint32_t maxRayHitAttributeSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderGroupHandleSize( shaderGroupHandleSize_ )
      , maxRayRecursionDepth( maxRayRecursionDepth_ )
      , maxShaderGroupStride( maxShaderGroupStride_ )
      , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
      , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ )
      , maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ )
      , shaderGroupHandleAlignment( shaderGroupHandleAlignment_ )
      , maxRayHitAttributeSize( maxRayHitAttributeSize_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR(
      PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPipelinePropertiesKHR(
          *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelinePropertiesKHR &
                            operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPipelinePropertiesKHR &
      operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
    }

    operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) &&
             ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) &&
             ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
             ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) &&
             ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) &&
             ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) &&
             ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) &&
             ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize );
    }

    bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
    void *                              pNext = {};
    uint32_t                            shaderGroupHandleSize              = {};
    uint32_t                            maxRayRecursionDepth               = {};
    uint32_t                            maxShaderGroupStride               = {};
    uint32_t                            shaderGroupBaseAlignment           = {};
    uint32_t                            shaderGroupHandleCaptureReplaySize = {};
    uint32_t                            maxRayDispatchInvocationCount      = {};
    uint32_t                            shaderGroupHandleAlignment         = {};
    uint32_t                            maxRayHitAttributeSize             = {};
  };
  static_assert( sizeof( PhysicalDeviceRayTracingPipelinePropertiesKHR ) ==
                   sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPipelinePropertiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR>
  {
    using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR;
  };
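
  // Illustrative note (not generated code): the limits above drive shader binding table layout. One common
  // approach (an assumption of this sketch, not mandated by this header) rounds the handle size up to
  // shaderGroupHandleAlignment for the per-record stride and aligns each table region to
  // shaderGroupBaseAlignment; alignUp is a hypothetical helper:
  //
  //   uint32_t alignUp( uint32_t value, uint32_t alignment )
  //   {
  //     return ( value + alignment - 1 ) & ~( alignment - 1 );
  //   }
  //   uint32_t handleStride     = alignUp( props.shaderGroupHandleSize, props.shaderGroupHandleAlignment );
  //   uint32_t raygenRegionSize = alignUp( handleStride, props.shaderGroupBaseAlignment );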

  struct PhysicalDeviceRayTracingPropertiesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceRayTracingPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_                  = {},
                                            uint32_t maxRecursionDepth_                      = {},
                                            uint32_t maxShaderGroupStride_                   = {},
                                            uint32_t shaderGroupBaseAlignment_               = {},
                                            uint64_t maxGeometryCount_                       = {},
                                            uint64_t maxInstanceCount_                       = {},
                                            uint64_t maxTriangleCount_                       = {},
                                            uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderGroupHandleSize( shaderGroupHandleSize_ )
      , maxRecursionDepth( maxRecursionDepth_ )
      , maxShaderGroupStride( maxShaderGroupStride_ )
      , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
      , maxGeometryCount( maxGeometryCount_ )
      , maxInstanceCount( maxInstanceCount_ )
      , maxTriangleCount( maxTriangleCount_ )
      , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPropertiesNV &
                            operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRayTracingPropertiesNV &
      operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) &&
             ( maxShaderGroupStride == rhs.maxShaderGroupStride ) &&
             ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) &&
             ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) &&
             ( maxTriangleCount == rhs.maxTriangleCount ) &&
             ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
    }

    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                    = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
    void *                              pNext                    = {};
    uint32_t                            shaderGroupHandleSize    = {};
    uint32_t                            maxRecursionDepth        = {};
    uint32_t                            maxShaderGroupStride     = {};
    uint32_t                            shaderGroupBaseAlignment = {};
    uint64_t                            maxGeometryCount         = {};
    uint64_t                            maxInstanceCount         = {};
    uint64_t                            maxTriangleCount         = {};
    uint32_t                            maxDescriptorSetAccelerationStructures = {};
  };
  static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
  {
    using Type = PhysicalDeviceRayTracingPropertiesNV;
  };

  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT
      : representativeFragmentTest( representativeFragmentTest_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
      PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
      VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRepresentativeFragmentTestFeaturesNV(
          *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
                            operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
      operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceRepresentativeFragmentTestFeaturesNV &
      setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
    {
      representativeFragmentTest = representativeFragmentTest_;
      return *this;
    }

    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( representativeFragmentTest == rhs.representativeFragmentTest );
    }

    bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        representativeFragmentTest = {};
  };
  static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
  {
    using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
  };

  struct PhysicalDeviceRobustness2FeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceRobustness2FeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {},
                                            VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_  = {},
                                            VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_      = {} ) VULKAN_HPP_NOEXCEPT
      : robustBufferAccess2( robustBufferAccess2_ )
      , robustImageAccess2( robustImageAccess2_ )
      , nullDescriptor( nullDescriptor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast<PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT &
                            operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRobustness2FeaturesEXT &
      operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceRobustness2FeaturesEXT &
      setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
    {
      robustBufferAccess2 = robustBufferAccess2_;
      return *this;
    }

    PhysicalDeviceRobustness2FeaturesEXT &
      setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
    {
      robustImageAccess2 = robustImageAccess2_;
      return *this;
    }

    PhysicalDeviceRobustness2FeaturesEXT &
      setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      nullDescriptor = nullDescriptor_;
      return *this;
    }

    operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) &&
             ( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor );
    }

    bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
    void *                              pNext               = {};
    VULKAN_HPP_NAMESPACE::Bool32        robustBufferAccess2 = {};
    VULKAN_HPP_NAMESPACE::Bool32        robustImageAccess2  = {};
    VULKAN_HPP_NAMESPACE::Bool32        nullDescriptor      = {};
  };
  static_assert( sizeof( PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceRobustness2FeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
  {
    using Type = PhysicalDeviceRobustness2FeaturesEXT;
  };

  struct PhysicalDeviceRobustness2PropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceRobustness2PropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(
      VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {},
      VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
      : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ )
      , robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRobustness2PropertiesEXT(
          *reinterpret_cast<PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2PropertiesEXT &
                            operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceRobustness2PropertiesEXT &
      operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) &&
             ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
    }

    bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    robustStorageBufferAccessSizeAlignment = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    robustUniformBufferAccessSizeAlignment = {};
  };
  static_assert( sizeof( PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceRobustness2PropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
  {
    using Type = PhysicalDeviceRobustness2PropertiesEXT;
  };

  struct PhysicalDeviceSampleLocationsPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(
      VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_    = {},
      VULKAN_HPP_NAMESPACE::Extent2D         maxSampleLocationGridSize_     = {},
      std::array<float, 2> const &           sampleLocationCoordinateRange_ = {},
      uint32_t                               sampleLocationSubPixelBits_    = {},
      VULKAN_HPP_NAMESPACE::Bool32           variableSampleLocations_       = {} ) VULKAN_HPP_NOEXCEPT
      : sampleLocationSampleCounts( sampleLocationSampleCounts_ )
      , maxSampleLocationGridSize( maxSampleLocationGridSize_ )
      , sampleLocationCoordinateRange( sampleLocationCoordinateRange_ )
      , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ )
      , variableSampleLocations( variableSampleLocations_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(
      PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSampleLocationsPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT &
                            operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSampleLocationsPropertiesEXT &
      operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) &&
             ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) &&
             ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) &&
             ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) &&
             ( variableSampleLocations == rhs.variableSampleLocations );
    }

    bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
    void *                                         pNext = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags         sampleLocationSampleCounts    = {};
    VULKAN_HPP_NAMESPACE::Extent2D                 maxSampleLocationGridSize     = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
    uint32_t                                       sampleLocationSubPixelBits    = {};
    VULKAN_HPP_NAMESPACE::Bool32                   variableSampleLocations       = {};
  };
  static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSampleLocationsPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
  {
    using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
  };
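
  // Illustrative note (not generated code): C array members such as the float[2] sampleLocationCoordinateRange
  // above are exposed through ArrayWrapper1D, which behaves like a std::array. Assuming props was obtained from
  // a properties query:
  //
  //   float minCoord = props.sampleLocationCoordinateRange[0];
  //   float maxCoord = props.sampleLocationCoordinateRange[1];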

  struct PhysicalDeviceSamplerFilterMinmaxProperties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(
      VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_  = {} ) VULKAN_HPP_NOEXCEPT
      : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
      , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(
      PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSamplerFilterMinmaxProperties(
          *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerFilterMinmaxProperties &
                            operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSamplerFilterMinmaxProperties &
      operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
    }

    operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
             ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
    }

    bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        filterMinmaxSingleComponentFormats = {};
    VULKAN_HPP_NAMESPACE::Bool32        filterMinmaxImageComponentMapping  = {};
  };
  static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) ==
                   sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
  {
    using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
  };
  using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;

  struct PhysicalDeviceSamplerYcbcrConversionFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT
      : samplerYcbcrConversion( samplerYcbcrConversion_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(
      PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSamplerYcbcrConversionFeatures(
          *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures &
                            operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSamplerYcbcrConversionFeatures &
      operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceSamplerYcbcrConversionFeatures &
      setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerYcbcrConversion = samplerYcbcrConversion_;
      return *this;
    }

    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
    }

    operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
    }

    bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        samplerYcbcrConversion = {};
  };
  static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) ==
                   sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSamplerYcbcrConversionFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
  {
    using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
  };
  using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;

  struct PhysicalDeviceScalarBlockLayoutFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} )
      VULKAN_HPP_NOEXCEPT : scalarBlockLayout( scalarBlockLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceScalarBlockLayoutFeatures(
          *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures &
                            operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceScalarBlockLayoutFeatures &
      operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceScalarBlockLayoutFeatures &
      setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      scalarBlockLayout = scalarBlockLayout_;
      return *this;
    }

    operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout );
    }

    bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
    void *                              pNext             = {};
    VULKAN_HPP_NAMESPACE::Bool32        scalarBlockLayout = {};
  };
  static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) ==
                   sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceScalarBlockLayoutFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
  {
    using Type = PhysicalDeviceScalarBlockLayoutFeatures;
  };
  using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;

  struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
      : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(
      PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSeparateDepthStencilLayoutsFeatures(
          *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
                            operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
      operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceSeparateDepthStencilLayoutsFeatures &
      setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      separateDepthStencilLayouts = separateDepthStencilLayouts_;
      return *this;
    }

    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
    }

    operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
    }

    bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        separateDepthStencilLayouts = {};
  };
  static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) ==
                   sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
  {
    using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
  };
  using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;

  struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_   = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_   = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_   = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_   = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_    = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_  = {},
      VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_    = {},
      VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_  = {} ) VULKAN_HPP_NOEXCEPT
      : shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ )
      , shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ )
      , shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ )
      , shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ )
      , shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ )
      , shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ )
      , shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ )
      , shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ )
      , shaderImageFloat32Atomics( shaderImageFloat32Atomics_ )
      , shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ )
      , sparseImageFloat32Atomics( sparseImageFloat32Atomics_ )
      , sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(
      PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderAtomicFloatFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT &
                            operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
      return *this;
    }

    PhysicalDeviceShaderAtomicFloatFeaturesEXT &
      setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
      return *this;
    }

    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) &&
             ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) &&
             ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) &&
             ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) &&
             ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) &&
             ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) &&
             ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) &&
             ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) &&
             ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) &&
             ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) &&
             ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) &&
             ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
    }

    bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderBufferFloat32Atomics   = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderBufferFloat32AtomicAdd = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderBufferFloat64Atomics   = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderBufferFloat64AtomicAdd = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSharedFloat32Atomics   = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSharedFloat32AtomicAdd = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSharedFloat64Atomics   = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSharedFloat64AtomicAdd = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderImageFloat32Atomics    = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderImageFloat32AtomicAdd  = {};
    VULKAN_HPP_NAMESPACE::Bool32        sparseImageFloat32Atomics    = {};
    VULKAN_HPP_NAMESPACE::Bool32        sparseImageFloat32AtomicAdd  = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
  };
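
  // Usage sketch (illustrative comment): query the shader atomic float capabilities by
  // chaining this struct behind PhysicalDeviceFeatures2 with the StructureChain variant
  // of getFeatures2 (available when enhanced mode is enabled).
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceShaderAtomicFloatFeaturesEXT>();
  //   auto const & atomicFloat = chain.get<vk::PhysicalDeviceShaderAtomicFloatFeaturesEXT>();
  //   bool bufferFloat32Add    = ( atomicFloat.shaderBufferFloat32AtomicAdd == VK_TRUE );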

  struct PhysicalDeviceShaderAtomicInt64Features
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderAtomicInt64Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(
      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
      , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderAtomicInt64Features(
          *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features &
                            operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderAtomicInt64Features &
      operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderAtomicInt64Features &
      setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
      return *this;
    }

    PhysicalDeviceShaderAtomicInt64Features &
      setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
      return *this;
    }

    operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features *>( this );
    }

    operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
             ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
    }

    bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderBufferInt64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSharedInt64Atomics = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) ==
                   sizeof( VkPhysicalDeviceShaderAtomicInt64Features ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64Features>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
  {
    using Type = PhysicalDeviceShaderAtomicInt64Features;
  };
  using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;

  struct PhysicalDeviceShaderClockFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderClockFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {},
                                            VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_   = {} ) VULKAN_HPP_NOEXCEPT
      : shaderSubgroupClock( shaderSubgroupClock_ )
      , shaderDeviceClock( shaderDeviceClock_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR &
                            operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderClockFeaturesKHR &
      operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderClockFeaturesKHR &
      setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupClock = shaderSubgroupClock_;
      return *this;
    }

    PhysicalDeviceShaderClockFeaturesKHR &
      setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDeviceClock = shaderDeviceClock_;
      return *this;
    }

    operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) &&
             ( shaderDeviceClock == rhs.shaderDeviceClock );
    }

    bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType               = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
    void *                              pNext               = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSubgroupClock = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderDeviceClock   = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderClockFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderClockFeaturesKHR;
  };
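
  // Usage sketch (illustrative comment): feature structs can also be filled through the
  // constructor instead of the set*() calls, and the generated operator== compares sType,
  // pNext and every feature member.
  //
  //   vk::PhysicalDeviceShaderClockFeaturesKHR requested( /*shaderSubgroupClock=*/VK_TRUE,
  //                                                       /*shaderDeviceClock=*/VK_FALSE );
  //   vk::PhysicalDeviceShaderClockFeaturesKHR copy = requested;
  //   assert( copy == requested );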

  struct PhysicalDeviceShaderCoreProperties2AMD
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderCoreProperties2AMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(
      VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_     = {},
      uint32_t                                           activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderCoreFeatures( shaderCoreFeatures_ )
      , activeComputeUnitCount( activeComputeUnitCount_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderCoreProperties2AMD(
          *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreProperties2AMD &
                            operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderCoreProperties2AMD &
      operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
    }

    operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) &&
             ( activeComputeUnitCount == rhs.activeComputeUnitCount );
    }

    bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
    void *                                             pNext = {};
    VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures     = {};
    uint32_t                                           activeComputeUnitCount = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderCoreProperties2AMD>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
  {
    using Type = PhysicalDeviceShaderCoreProperties2AMD;
  };

  struct PhysicalDeviceShaderCorePropertiesAMD
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderCorePropertiesAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_          = {},
                                             uint32_t shaderArraysPerEngineCount_ = {},
                                             uint32_t computeUnitsPerShaderArray_ = {},
                                             uint32_t simdPerComputeUnit_         = {},
                                             uint32_t wavefrontsPerSimd_          = {},
                                             uint32_t wavefrontSize_              = {},
                                             uint32_t sgprsPerSimd_               = {},
                                             uint32_t minSgprAllocation_          = {},
                                             uint32_t maxSgprAllocation_          = {},
                                             uint32_t sgprAllocationGranularity_  = {},
                                             uint32_t vgprsPerSimd_               = {},
                                             uint32_t minVgprAllocation_          = {},
                                             uint32_t maxVgprAllocation_          = {},
                                             uint32_t vgprAllocationGranularity_  = {} ) VULKAN_HPP_NOEXCEPT
      : shaderEngineCount( shaderEngineCount_ )
      , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ )
      , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ )
      , simdPerComputeUnit( simdPerComputeUnit_ )
      , wavefrontsPerSimd( wavefrontsPerSimd_ )
      , wavefrontSize( wavefrontSize_ )
      , sgprsPerSimd( sgprsPerSimd_ )
      , minSgprAllocation( minSgprAllocation_ )
      , maxSgprAllocation( maxSgprAllocation_ )
      , sgprAllocationGranularity( sgprAllocationGranularity_ )
      , vgprsPerSimd( vgprsPerSimd_ )
      , minVgprAllocation( minVgprAllocation_ )
      , maxVgprAllocation( maxVgprAllocation_ )
      , vgprAllocationGranularity( vgprAllocationGranularity_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderCorePropertiesAMD(
          *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCorePropertiesAMD &
                            operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderCorePropertiesAMD &
      operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD *>( this );
    }

    operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) &&
             ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) &&
             ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) &&
             ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) &&
             ( wavefrontSize == rhs.wavefrontSize ) && ( sgprsPerSimd == rhs.sgprsPerSimd ) &&
             ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) &&
             ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) &&
             ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) &&
             ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
    }

    bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
    void *                              pNext             = {};
    uint32_t                            shaderEngineCount = {};
    uint32_t                            shaderArraysPerEngineCount = {};
    uint32_t                            computeUnitsPerShaderArray = {};
    uint32_t                            simdPerComputeUnit         = {};
    uint32_t                            wavefrontsPerSimd          = {};
    uint32_t                            wavefrontSize              = {};
    uint32_t                            sgprsPerSimd               = {};
    uint32_t                            minSgprAllocation          = {};
    uint32_t                            maxSgprAllocation          = {};
    uint32_t                            sgprAllocationGranularity  = {};
    uint32_t                            vgprsPerSimd               = {};
    uint32_t                            minVgprAllocation          = {};
    uint32_t                            maxVgprAllocation          = {};
    uint32_t                            vgprAllocationGranularity  = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderCorePropertiesAMD>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
  {
    using Type = PhysicalDeviceShaderCorePropertiesAMD;
  };
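
  // Usage sketch (illustrative comment): properties structs such as this one have no
  // set*() members; they are filled by the implementation when chained behind
  // PhysicalDeviceProperties2 in a getProperties2 call.
  //
  //   vk::PhysicalDeviceShaderCorePropertiesAMD coreProperties;
  //   vk::PhysicalDeviceProperties2             properties2;
  //   properties2.pNext = &coreProperties;
  //   physicalDevice.getProperties2( &properties2 );
  //   uint32_t simdsPerArray = coreProperties.computeUnitsPerShaderArray * coreProperties.simdPerComputeUnit;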

  struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(
      PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(
      VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &
                            operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &
      operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>(
        &rhs );
      return *this;
    }

    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation(
      VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
      return *this;
    }

    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
    }

    bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderDemoteToHelperInvocation = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>
  {
    using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
  };

  struct PhysicalDeviceShaderDrawParametersFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderDrawParametersFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderDrawParameters( shaderDrawParameters_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(
      PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderDrawParametersFeatures(
          *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures &
                            operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderDrawParametersFeatures &
      operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderDrawParametersFeatures &
      setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDrawParameters = shaderDrawParameters_;
      return *this;
    }

    operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters );
    }

    bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderDrawParameters = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) ==
                   sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
  {
    using Type = PhysicalDeviceShaderDrawParametersFeatures;
  };
  using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;

  struct PhysicalDeviceShaderFloat16Int8Features
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderFloat16Int8Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {},
                                               VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_    = {} ) VULKAN_HPP_NOEXCEPT
      : shaderFloat16( shaderFloat16_ )
      , shaderInt8( shaderInt8_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderFloat16Int8Features(
          *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features &
                            operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderFloat16Int8Features &
      operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderFloat16Int8Features &
      setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat16 = shaderFloat16_;
      return *this;
    }

    PhysicalDeviceShaderFloat16Int8Features &
      setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt8 = shaderInt8_;
      return *this;
    }

    operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }

    operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) &&
             ( shaderInt8 == rhs.shaderInt8 );
    }

    bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
    void *                              pNext         = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderFloat16 = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderInt8    = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) ==
                   sizeof( VkPhysicalDeviceShaderFloat16Int8Features ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8Features>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
  {
    using Type = PhysicalDeviceShaderFloat16Int8Features;
  };
  using PhysicalDeviceFloat16Int8FeaturesKHR       = PhysicalDeviceShaderFloat16Int8Features;
  using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
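
  // Note: the KHR aliases above are kept for source compatibility with code written
  // against VK_KHR_shader_float16_int8; the un-suffixed name is the Vulkan 1.2 core
  // form of the same structure.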

  struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderImageInt64Atomics( shaderImageInt64Atomics_ )
      , sparseImageInt64Atomics( sparseImageInt64Atomics_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(
      PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(
          *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
                            operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
      operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
      setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderImageInt64Atomics = shaderImageInt64Atomics_;
      return *this;
    }

    PhysicalDeviceShaderImageAtomicInt64FeaturesEXT &
      setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      sparseImageInt64Atomics = sparseImageInt64Atomics_;
      return *this;
    }

    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) &&
             ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
    }

    bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderImageInt64Atomics = {};
    VULKAN_HPP_NAMESPACE::Bool32        sparseImageInt64Atomics = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
  {
    using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
  };
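
  // Usage sketch (illustrative comment): the const and non-const conversion operators
  // defined above let a wrapper struct be handed to code using the plain C types
  // without an explicit cast.
  //
  //   vk::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT features;
  //   features.setShaderImageInt64Atomics( VK_TRUE );
  //   VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT cStruct = features;  // converts via operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &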

  struct PhysicalDeviceShaderImageFootprintFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT : imageFootprint( imageFootprint_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(
      PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderImageFootprintFeaturesNV(
          *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV &
                            operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderImageFootprintFeaturesNV &
      operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderImageFootprintFeaturesNV &
      setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
    {
      imageFootprint = imageFootprint_;
      return *this;
    }

    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint );
    }

    bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
    void *                              pNext          = {};
    VULKAN_HPP_NAMESPACE::Bool32        imageFootprint = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderImageFootprintFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
  {
    using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
  };

  struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
      VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
      PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
      VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(
          *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
                            operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
      operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &
      setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderIntegerFunctions2 = shaderIntegerFunctions2_;
      return *this;
    }

    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
    }

    operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
    }

    bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderIntegerFunctions2 = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) ==
                   sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
  {
    using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
  };

  struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} )
      VULKAN_HPP_NOEXCEPT : shaderSMBuiltins( shaderSMBuiltins_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(
      PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSMBuiltinsFeaturesNV(
          *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV &
                            operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSMBuiltinsFeaturesNV &
      operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderSMBuiltinsFeaturesNV &
      setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSMBuiltins = shaderSMBuiltins_;
      return *this;
    }

    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins );
    }

    bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
    void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSMBuiltins = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
  {
    using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
  };
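  // Illustrative usage sketch (comment-only addition, not generated API): to check whether the
  // VK_NV_shader_sm_builtins feature is available, this struct can be chained into a
  // PhysicalDeviceFeatures2 query. `physicalDevice` is assumed to be a valid vk::PhysicalDevice
  // obtained elsewhere; `vk` is the default VULKAN_HPP_NAMESPACE alias.
  //
  //   vk::PhysicalDeviceShaderSMBuiltinsFeaturesNV smBuiltinsFeatures;
  //   vk::PhysicalDeviceFeatures2                  features2;
  //   features2.pNext = &smBuiltinsFeatures;      // chain the extension struct
  //   physicalDevice.getFeatures2( &features2 );  // fills both structs
  //   bool supported = ( smBuiltinsFeatures.shaderSMBuiltins == VK_TRUE );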

  struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_    = {},
                                                  uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderSMCount( shaderSMCount_ )
      , shaderWarpsPerSM( shaderWarpsPerSM_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(
      PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSMBuiltinsPropertiesNV(
          *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsPropertiesNV &
                            operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSMBuiltinsPropertiesNV &
      operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
    }

    operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) &&
             ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
    }

    bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
    void *                              pNext            = {};
    uint32_t                            shaderSMCount    = {};
    uint32_t                            shaderWarpsPerSM = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) ==
                   sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
  {
    using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
  };
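  // Illustrative sketch (assumes a valid vk::PhysicalDevice named `physicalDevice`): in enhanced
  // mode the corresponding properties can also be retrieved through the templated getProperties2
  // overload, which returns a StructureChain with the extension struct already linked in.
  //
  //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                              vk::PhysicalDeviceShaderSMBuiltinsPropertiesNV>();
  //   auto const & smProps    = chain.get<vk::PhysicalDeviceShaderSMBuiltinsPropertiesNV>();
  //   uint32_t     warpsPerSM = smProps.shaderWarpsPerSM;  // e.g. for occupancy estimates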

  struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(
      PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSubgroupExtendedTypesFeatures(
          *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
                            operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
      operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderSubgroupExtendedTypesFeatures &
      setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
      return *this;
    }

    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
    }

    bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSubgroupExtendedTypes = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) ==
                   sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
  {
    using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
  };
  using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;

  struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
      PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
      VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR(
          *reinterpret_cast<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
                            operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &
      operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const *>(
          &rhs );
      return *this;
    }

    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setShaderSubgroupUniformControlFlow(
      VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_;
      return *this;
    }

    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow );
    }

    bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType =
      StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
    void *                       pNext                            = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) ==
                   sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
  };

  struct PhysicalDeviceShaderTerminateInvocationFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderTerminateInvocation( shaderTerminateInvocation_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR(
      PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderTerminateInvocationFeaturesKHR(
      VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShaderTerminateInvocationFeaturesKHR(
          *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeaturesKHR &
                            operator=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShaderTerminateInvocationFeaturesKHR &
      operator=( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShaderTerminateInvocationFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShaderTerminateInvocationFeaturesKHR &
      setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderTerminateInvocation = shaderTerminateInvocation_;
      return *this;
    }

    operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
    }

    bool operator!=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderTerminateInvocation = {};
  };
  static_assert( sizeof( PhysicalDeviceShaderTerminateInvocationFeaturesKHR ) ==
                   sizeof( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShaderTerminateInvocationFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR>
  {
    using Type = PhysicalDeviceShaderTerminateInvocationFeaturesKHR;
  };

  struct PhysicalDeviceShadingRateImageFeaturesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(
      VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_             = {},
      VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT
      : shadingRateImage( shadingRateImage_ )
      , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(
      PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShadingRateImageFeaturesNV(
          *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV &
                            operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShadingRateImageFeaturesNV &
      operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
      return *this;
    }

    PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceShadingRateImageFeaturesNV &
      setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateImage = shadingRateImage_;
      return *this;
    }

    PhysicalDeviceShadingRateImageFeaturesNV &
      setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
      return *this;
    }

    operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
    }

    operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) &&
             ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
    }

    bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
    void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::Bool32        shadingRateImage = {};
    VULKAN_HPP_NAMESPACE::Bool32        shadingRateCoarseSampleOrder = {};
  };
  static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) ==
                   sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImageFeaturesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
  {
    using Type = PhysicalDeviceShadingRateImageFeaturesNV;
  };

  struct PhysicalDeviceShadingRateImagePropertiesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_   = {},
                                                  uint32_t                       shadingRatePaletteSize_ = {},
                                                  uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT
      : shadingRateTexelSize( shadingRateTexelSize_ )
      , shadingRatePaletteSize( shadingRatePaletteSize_ )
      , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(
      PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceShadingRateImagePropertiesNV(
          *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImagePropertiesNV &
                            operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceShadingRateImagePropertiesNV &
      operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
    }

    operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default;
#else
    bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) &&
             ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) &&
             ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
    }

    bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D      shadingRateTexelSize        = {};
    uint32_t                            shadingRatePaletteSize      = {};
    uint32_t                            shadingRateMaxCoarseSamples = {};
  };
  static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) ==
                   sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImagePropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
  {
    using Type = PhysicalDeviceShadingRateImagePropertiesNV;
  };

  struct PhysicalDeviceSubgroupProperties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(
      uint32_t                                   subgroupSize_              = {},
      VULKAN_HPP_NAMESPACE::ShaderStageFlags     supportedStages_           = {},
      VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_       = {},
      VULKAN_HPP_NAMESPACE::Bool32               quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT
      : subgroupSize( subgroupSize_ )
      , supportedStages( supportedStages_ )
      , supportedOperations( supportedOperations_ )
      , quadOperationsInAllStages( quadOperationsInAllStages_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupProperties &
                            operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties *>( this );
    }

    operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) &&
             ( supportedStages == rhs.supportedStages ) && ( supportedOperations == rhs.supportedOperations ) &&
             ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
    }

    bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType               = StructureType::ePhysicalDeviceSubgroupProperties;
    void *                                     pNext               = {};
    uint32_t                                   subgroupSize        = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags     supportedStages     = {};
    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
    VULKAN_HPP_NAMESPACE::Bool32               quadOperationsInAllStages = {};
  };
  static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
  {
    using Type = PhysicalDeviceSubgroupProperties;
  };
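  // Illustrative sketch: the subgroup limits above are core Vulkan 1.1 properties, so they can be
  // queried without enabling any extension. `physicalDevice` is assumed to be a valid
  // vk::PhysicalDevice.
  //
  //   vk::PhysicalDeviceSubgroupProperties subgroupProps;
  //   vk::PhysicalDeviceProperties2        props2;
  //   props2.pNext = &subgroupProps;
  //   physicalDevice.getProperties2( &props2 );
  //   if ( subgroupProps.supportedOperations & vk::SubgroupFeatureFlagBits::eArithmetic )
  //   {
  //     // subgroup arithmetic is available in the stages listed in subgroupProps.supportedStages
  //   }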

  struct PhysicalDeviceSubgroupSizeControlFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_  = {},
      VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT
      : subgroupSizeControl( subgroupSizeControl_ )
      , computeFullSubgroups( computeFullSubgroups_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT(
      PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubgroupSizeControlFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeaturesEXT &
                            operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSubgroupSizeControlFeaturesEXT &
      operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceSubgroupSizeControlFeaturesEXT &
      setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
    {
      subgroupSizeControl = subgroupSizeControl_;
      return *this;
    }

    PhysicalDeviceSubgroupSizeControlFeaturesEXT &
      setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
    {
      computeFullSubgroups = computeFullSubgroups_;
      return *this;
    }

    operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) &&
             ( computeFullSubgroups == rhs.computeFullSubgroups );
    }

    bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        subgroupSizeControl  = {};
    VULKAN_HPP_NAMESPACE::Bool32        computeFullSubgroups = {};
  };
  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT>
  {
    using Type = PhysicalDeviceSubgroupSizeControlFeaturesEXT;
  };

  struct PhysicalDeviceSubgroupSizeControlPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT(
      uint32_t                               minSubgroupSize_              = {},
      uint32_t                               maxSubgroupSize_              = {},
      uint32_t                               maxComputeWorkgroupSubgroups_ = {},
      VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_   = {} ) VULKAN_HPP_NOEXCEPT
      : minSubgroupSize( minSubgroupSize_ )
      , maxSubgroupSize( maxSubgroupSize_ )
      , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ )
      , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT(
      PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSubgroupSizeControlPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlPropertiesEXT &
                            operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSubgroupSizeControlPropertiesEXT &
      operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) &&
             ( maxSubgroupSize == rhs.maxSubgroupSize ) &&
             ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) &&
             ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
    }

    bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType    sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
    void *                                 pNext = {};
    uint32_t                               minSubgroupSize              = {};
    uint32_t                               maxSubgroupSize              = {};
    uint32_t                               maxComputeWorkgroupSubgroups = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages   = {};
  };
  static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT>
  {
    using Type = PhysicalDeviceSubgroupSizeControlPropertiesEXT;
  };

  struct PhysicalDeviceSynchronization2FeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceSynchronization2FeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {} ) VULKAN_HPP_NOEXCEPT : synchronization2( synchronization2_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2FeaturesKHR(
      PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSynchronization2FeaturesKHR( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceSynchronization2FeaturesKHR(
          *reinterpret_cast<PhysicalDeviceSynchronization2FeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2FeaturesKHR &
                            operator=( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceSynchronization2FeaturesKHR &
      operator=( VkPhysicalDeviceSynchronization2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2FeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceSynchronization2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceSynchronization2FeaturesKHR &
      setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT
    {
      synchronization2 = synchronization2_;
      return *this;
    }

    operator VkPhysicalDeviceSynchronization2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceSynchronization2FeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceSynchronization2FeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceSynchronization2FeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceSynchronization2FeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 );
    }

    bool operator!=( PhysicalDeviceSynchronization2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceSynchronization2FeaturesKHR;
    void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::Bool32        synchronization2 = {};
  };
  static_assert( sizeof( PhysicalDeviceSynchronization2FeaturesKHR ) ==
                   sizeof( VkPhysicalDeviceSynchronization2FeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceSynchronization2FeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceSynchronization2FeaturesKHR>
  {
    using Type = PhysicalDeviceSynchronization2FeaturesKHR;
  };
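  // Illustrative sketch (comment only): the feature is enabled at device creation by chaining this
  // struct into DeviceCreateInfo::pNext. It assumes VK_KHR_synchronization2 has been added to the
  // enabled extension list and that `queueCreateInfo` is a prepared vk::DeviceQueueCreateInfo.
  //
  //   vk::PhysicalDeviceSynchronization2FeaturesKHR sync2Features;
  //   sync2Features.synchronization2 = VK_TRUE;
  //   vk::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setQueueCreateInfos( queueCreateInfo ).setPNext( &sync2Features );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );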

  struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
      : texelBufferAlignment( texelBufferAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
      PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTexelBufferAlignmentFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
                            operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
      operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceTexelBufferAlignmentFeaturesEXT &
      setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
    {
      texelBufferAlignment = texelBufferAlignment_;
      return *this;
    }

    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment );
    }

    bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        texelBufferAlignment = {};
  };
  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
  {
    using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
  };

  struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(
      VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_       = {},
      VULKAN_HPP_NAMESPACE::Bool32     storageTexelBufferOffsetSingleTexelAlignment_ = {},
      VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_       = {},
      VULKAN_HPP_NAMESPACE::Bool32     uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
      : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ )
      , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ )
      , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ )
      , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(
      PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTexelBufferAlignmentPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentPropertiesEXT &
                            operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTexelBufferAlignmentPropertiesEXT &
      operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) &&
             ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) &&
             ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) &&
             ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
    }

    bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    storageTexelBufferOffsetAlignmentBytes       = {};
    VULKAN_HPP_NAMESPACE::Bool32        storageTexelBufferOffsetSingleTexelAlignment = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    uniformTexelBufferOffsetAlignmentBytes       = {};
    VULKAN_HPP_NAMESPACE::Bool32        uniformTexelBufferOffsetSingleTexelAlignment = {};
  };
  static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT>
  {
    using Type = PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
  };

  struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT
      : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
      PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
      VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &
                            operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &
      operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &
      setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
    {
      textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
      return *this;
    }

    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
    }

    bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        textureCompressionASTC_HDR = {};
  };
  static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT>
  {
    using Type = PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
  };

  struct PhysicalDeviceTimelineSemaphoreFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {} )
      VULKAN_HPP_NOEXCEPT : timelineSemaphore( timelineSemaphore_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTimelineSemaphoreFeatures(
          *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures &
                            operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTimelineSemaphoreFeatures &
      operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceTimelineSemaphoreFeatures &
      setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      timelineSemaphore = timelineSemaphore_;
      return *this;
    }

    operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore );
    }

    bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
    void *                              pNext             = {};
    VULKAN_HPP_NAMESPACE::Bool32        timelineSemaphore = {};
  };
  static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) ==
                   sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
  {
    using Type = PhysicalDeviceTimelineSemaphoreFeatures;
  };
  using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
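
  // Illustrative sketch only (not part of the generated header): to check
  // whether timeline semaphores are supported, this feature struct is
  // typically chained into PhysicalDeviceFeatures2 via pNext and filled by
  // PhysicalDevice::getFeatures2; "physicalDevice" below is assumed to be a
  // valid vk::PhysicalDevice.
  //
  //   vk::PhysicalDeviceTimelineSemaphoreFeatures timelineSemaphoreFeatures;
  //   vk::PhysicalDeviceFeatures2                 features2;
  //   features2.pNext = &timelineSemaphoreFeatures;
  //   physicalDevice.getFeatures2( &features2 );
  //   bool supported = ( timelineSemaphoreFeatures.timelineSemaphore == VK_TRUE );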

  struct PhysicalDeviceTimelineSemaphoreProperties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceTimelineSemaphoreProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {} )
      VULKAN_HPP_NOEXCEPT : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(
      PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTimelineSemaphoreProperties(
          *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreProperties &
                            operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTimelineSemaphoreProperties &
      operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
    }

    operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default;
#else
    bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
    }

    bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
    void *                              pNext = {};
    uint64_t                            maxTimelineSemaphoreValueDifference = {};
  };
  static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) ==
                   sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
  {
    using Type = PhysicalDeviceTimelineSemaphoreProperties;
  };
  using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
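
  // Illustrative sketch only (not part of the generated header): the related
  // limit can be read by chaining this properties struct into
  // PhysicalDeviceProperties2 and calling PhysicalDevice::getProperties2;
  // "physicalDevice" is again an assumed vk::PhysicalDevice.
  //
  //   vk::PhysicalDeviceTimelineSemaphoreProperties timelineSemaphoreProperties;
  //   vk::PhysicalDeviceProperties2                 properties2;
  //   properties2.pNext = &timelineSemaphoreProperties;
  //   physicalDevice.getProperties2( &properties2 );
  //   uint64_t maxDiff = timelineSemaphoreProperties.maxTimelineSemaphoreValueDifference;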

  struct PhysicalDeviceTransformFeedbackFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_   = {} ) VULKAN_HPP_NOEXCEPT
      : transformFeedback( transformFeedback_ )
      , geometryStreams( geometryStreams_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(
      PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTransformFeedbackFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT &
                            operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTransformFeedbackFeaturesEXT &
      operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceTransformFeedbackFeaturesEXT &
      setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      transformFeedback = transformFeedback_;
      return *this;
    }

    PhysicalDeviceTransformFeedbackFeaturesEXT &
      setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
    {
      geometryStreams = geometryStreams_;
      return *this;
    }

    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) &&
             ( geometryStreams == rhs.geometryStreams );
    }

    bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
    void *                              pNext             = {};
    VULKAN_HPP_NAMESPACE::Bool32        transformFeedback = {};
    VULKAN_HPP_NAMESPACE::Bool32        geometryStreams   = {};
  };
  static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT>
  {
    using Type = PhysicalDeviceTransformFeedbackFeaturesEXT;
  };
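
  // Illustrative sketch only (not part of the generated header): instead of
  // wiring pNext by hand, the templated PhysicalDevice::getFeatures2 overload
  // can return a StructureChain that links this struct automatically
  // (assuming "physicalDevice" is a valid vk::PhysicalDevice).
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceTransformFeedbackFeaturesEXT>();
  //   auto const & xfbFeatures = chain.get<vk::PhysicalDeviceTransformFeedbackFeaturesEXT>();
  //   bool canUseTransformFeedback = ( xfbFeatures.transformFeedback == VK_TRUE );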

  struct PhysicalDeviceTransformFeedbackPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(
      uint32_t                         maxTransformFeedbackStreams_                = {},
      uint32_t                         maxTransformFeedbackBuffers_                = {},
      VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_             = {},
      uint32_t                         maxTransformFeedbackStreamDataSize_         = {},
      uint32_t                         maxTransformFeedbackBufferDataSize_         = {},
      uint32_t                         maxTransformFeedbackBufferDataStride_       = {},
      VULKAN_HPP_NAMESPACE::Bool32     transformFeedbackQueries_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32     transformFeedbackStreamsLinesTriangles_     = {},
      VULKAN_HPP_NAMESPACE::Bool32     transformFeedbackRasterizationStreamSelect_ = {},
      VULKAN_HPP_NAMESPACE::Bool32     transformFeedbackDraw_                      = {} ) VULKAN_HPP_NOEXCEPT
      : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ )
      , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ )
      , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ )
      , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ )
      , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ )
      , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ )
      , transformFeedbackQueries( transformFeedbackQueries_ )
      , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ )
      , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ )
      , transformFeedbackDraw( transformFeedbackDraw_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(
      PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceTransformFeedbackPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackPropertiesEXT &
                            operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceTransformFeedbackPropertiesEXT &
      operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) &&
             ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) &&
             ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) &&
             ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) &&
             ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) &&
             ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) &&
             ( transformFeedbackQueries == rhs.transformFeedbackQueries ) &&
             ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) &&
             ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) &&
             ( transformFeedbackDraw == rhs.transformFeedbackDraw );
    }

    bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
    void *                              pNext = {};
    uint32_t                            maxTransformFeedbackStreams                = {};
    uint32_t                            maxTransformFeedbackBuffers                = {};
    VULKAN_HPP_NAMESPACE::DeviceSize    maxTransformFeedbackBufferSize             = {};
    uint32_t                            maxTransformFeedbackStreamDataSize         = {};
    uint32_t                            maxTransformFeedbackBufferDataSize         = {};
    uint32_t                            maxTransformFeedbackBufferDataStride       = {};
    VULKAN_HPP_NAMESPACE::Bool32        transformFeedbackQueries                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        transformFeedbackStreamsLinesTriangles     = {};
    VULKAN_HPP_NAMESPACE::Bool32        transformFeedbackRasterizationStreamSelect = {};
    VULKAN_HPP_NAMESPACE::Bool32        transformFeedbackDraw                      = {};
  };
  static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT>
  {
    using Type = PhysicalDeviceTransformFeedbackPropertiesEXT;
  };

  struct PhysicalDeviceUniformBufferStandardLayoutFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT
      : uniformBufferStandardLayout( uniformBufferStandardLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(
      PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceUniformBufferStandardLayoutFeatures(
          *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures &
                            operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceUniformBufferStandardLayoutFeatures &
      operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceUniformBufferStandardLayoutFeatures &
      setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformBufferStandardLayout = uniformBufferStandardLayout_;
      return *this;
    }

    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
    }

    operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
    }

    bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        uniformBufferStandardLayout = {};
  };
  static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) ==
                   sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceUniformBufferStandardLayoutFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
  {
    using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
  };
  using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;

  struct PhysicalDeviceVariablePointersFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceVariablePointersFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {},
                                              VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT
      : variablePointersStorageBuffer( variablePointersStorageBuffer_ )
      , variablePointers( variablePointers_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVariablePointersFeatures(
          *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures &
                            operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVariablePointersFeatures &
      operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer(
      VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      variablePointersStorageBuffer = variablePointersStorageBuffer_;
      return *this;
    }

    PhysicalDeviceVariablePointersFeatures &
      setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
    {
      variablePointers = variablePointers_;
      return *this;
    }

    operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures *>( this );
    }

    operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) &&
             ( variablePointers == rhs.variablePointers );
    }

    bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        variablePointersStorageBuffer = {};
    VULKAN_HPP_NAMESPACE::Bool32        variablePointers              = {};
  };
  static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVariablePointersFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
  {
    using Type = PhysicalDeviceVariablePointersFeatures;
  };
  using PhysicalDeviceVariablePointerFeatures     = PhysicalDeviceVariablePointersFeatures;
  using PhysicalDeviceVariablePointerFeaturesKHR  = PhysicalDeviceVariablePointersFeatures;
  using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;

  struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_     = {},
      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
      : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ )
      , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(
      PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexAttributeDivisorFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
                            operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT &
      operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor(
      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
      return *this;
    }

    PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor(
      VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
      return *this;
    }

    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) &&
             ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        vertexAttributeInstanceRateDivisor     = {};
    VULKAN_HPP_NAMESPACE::Bool32        vertexAttributeInstanceRateZeroDivisor = {};
  };
  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
  };

  struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxVertexAttribDivisor( maxVertexAttribDivisor_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(
      PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexAttributeDivisorPropertiesEXT(
          *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorPropertiesEXT &
                            operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexAttributeDivisorPropertiesEXT &
      operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
    }

    operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
    }

    bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
    void *                              pNext = {};
    uint32_t                            maxVertexAttribDivisor = {};
  };
  static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ==
                   sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
  {
    using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
  };

  struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {} ) VULKAN_HPP_NOEXCEPT
      : vertexInputDynamicState( vertexInputDynamicState_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT(
      PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVertexInputDynamicStateFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
                            operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
      operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
      setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexInputDynamicState = vertexInputDynamicState_;
      return *this;
    }

    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( vertexInputDynamicState == rhs.vertexInputDynamicState );
    }

    bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        vertexInputDynamicState = {};
  };
  static_assert( sizeof( PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
  {
    using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
  };

  struct PhysicalDeviceVulkan11Features
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_           = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_              = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_               = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 multiview_                          = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_            = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_        = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_      = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 variablePointers_                   = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_                    = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_             = {},
                                      VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT
      : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
      , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
      , storagePushConstant16( storagePushConstant16_ )
      , storageInputOutput16( storageInputOutput16_ )
      , multiview( multiview_ )
      , multiviewGeometryShader( multiviewGeometryShader_ )
      , multiviewTessellationShader( multiviewTessellationShader_ )
      , variablePointersStorageBuffer( variablePointersStorageBuffer_ )
      , variablePointers( variablePointers_ )
      , protectedMemory( protectedMemory_ )
      , samplerYcbcrConversion( samplerYcbcrConversion_ )
      , shaderDrawParameters( shaderDrawParameters_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features &
                            operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
      return *this;
    }

    PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceVulkan11Features &
      setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer16BitAccess = storageBuffer16BitAccess_;
      return *this;
    }

    PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess(
      VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
      return *this;
    }

    PhysicalDeviceVulkan11Features &
      setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant16 = storagePushConstant16_;
      return *this;
    }

    PhysicalDeviceVulkan11Features &
      setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
    {
      storageInputOutput16 = storageInputOutput16_;
      return *this;
    }

    PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
    {
      multiview = multiview_;
      return *this;
    }

    PhysicalDeviceVulkan11Features &
      setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewGeometryShader = multiviewGeometryShader_;
      return *this;
    }

    PhysicalDeviceVulkan11Features &
      setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewTessellationShader = multiviewTessellationShader_;
      return *this;
    }

    PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer(
      VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      variablePointersStorageBuffer = variablePointersStorageBuffer_;
      return *this;
    }

    PhysicalDeviceVulkan11Features &
      setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
    {
      variablePointers = variablePointers_;
      return *this;
    }

    PhysicalDeviceVulkan11Features &
      setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      protectedMemory = protectedMemory_;
      return *this;
    }

    PhysicalDeviceVulkan11Features &
      setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerYcbcrConversion = samplerYcbcrConversion_;
      return *this;
    }

    PhysicalDeviceVulkan11Features &
      setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderDrawParameters = shaderDrawParameters_;
      return *this;
    }

    operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features *>( this );
    }

    operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan11Features *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) &&
             ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) &&
             ( storagePushConstant16 == rhs.storagePushConstant16 ) &&
             ( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) &&
             ( multiviewGeometryShader == rhs.multiviewGeometryShader ) &&
             ( multiviewTessellationShader == rhs.multiviewTessellationShader ) &&
             ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) &&
             ( variablePointers == rhs.variablePointers ) && ( protectedMemory == rhs.protectedMemory ) &&
             ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) &&
             ( shaderDrawParameters == rhs.shaderDrawParameters );
    }

    bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                    = StructureType::ePhysicalDeviceVulkan11Features;
    void *                              pNext                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        storageBuffer16BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32        uniformAndStorageBuffer16BitAccess = {};
    VULKAN_HPP_NAMESPACE::Bool32        storagePushConstant16              = {};
    VULKAN_HPP_NAMESPACE::Bool32        storageInputOutput16               = {};
    VULKAN_HPP_NAMESPACE::Bool32        multiview                          = {};
    VULKAN_HPP_NAMESPACE::Bool32        multiviewGeometryShader            = {};
    VULKAN_HPP_NAMESPACE::Bool32        multiviewTessellationShader        = {};
    VULKAN_HPP_NAMESPACE::Bool32        variablePointersStorageBuffer      = {};
    VULKAN_HPP_NAMESPACE::Bool32        variablePointers                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        protectedMemory                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        samplerYcbcrConversion             = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderDrawParameters               = {};
  };
  static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Features>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
  {
    using Type = PhysicalDeviceVulkan11Features;
  };
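
  // Illustrative sketch only (not part of the generated header): to enable
  // Vulkan 1.1 core features at device creation, this struct can be filled via
  // its fluent setters and chained into DeviceCreateInfo. Queue setup and
  // error handling are omitted here; "physicalDevice" is an assumed
  // vk::PhysicalDevice.
  //
  //   vk::PhysicalDeviceVulkan11Features vulkan11Features;
  //   vulkan11Features.setMultiview( VK_TRUE ).setShaderDrawParameters( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo;
  //   deviceCreateInfo.setPNext( &vulkan11Features );
  //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );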

  struct PhysicalDeviceVulkan11Properties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(
      std::array<uint8_t, VK_UUID_SIZE> const &   deviceUUID_                        = {},
      std::array<uint8_t, VK_UUID_SIZE> const &   driverUUID_                        = {},
      std::array<uint8_t, VK_LUID_SIZE> const &   deviceLUID_                        = {},
      uint32_t                                    deviceNodeMask_                    = {},
      VULKAN_HPP_NAMESPACE::Bool32                deviceLUIDValid_                   = {},
      uint32_t                                    subgroupSize_                      = {},
      VULKAN_HPP_NAMESPACE::ShaderStageFlags      subgroupSupportedStages_           = {},
      VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags  subgroupSupportedOperations_       = {},
      VULKAN_HPP_NAMESPACE::Bool32                subgroupQuadOperationsInAllStages_ = {},
      VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ =
        VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes,
      uint32_t                         maxMultiviewViewCount_     = {},
      uint32_t                         maxMultiviewInstanceIndex_ = {},
      VULKAN_HPP_NAMESPACE::Bool32     protectedNoFault_          = {},
      uint32_t                         maxPerSetDescriptors_      = {},
      VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_   = {} ) VULKAN_HPP_NOEXCEPT
      : deviceUUID( deviceUUID_ )
      , driverUUID( driverUUID_ )
      , deviceLUID( deviceLUID_ )
      , deviceNodeMask( deviceNodeMask_ )
      , deviceLUIDValid( deviceLUIDValid_ )
      , subgroupSize( subgroupSize_ )
      , subgroupSupportedStages( subgroupSupportedStages_ )
      , subgroupSupportedOperations( subgroupSupportedOperations_ )
      , subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ )
      , pointClippingBehavior( pointClippingBehavior_ )
      , maxMultiviewViewCount( maxMultiviewViewCount_ )
      , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
      , protectedNoFault( protectedNoFault_ )
      , maxPerSetDescriptors( maxPerSetDescriptors_ )
      , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties &
                            operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) &&
             ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) &&
             ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) &&
             ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) &&
             ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) &&
             ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) &&
             ( pointClippingBehavior == rhs.pointClippingBehavior ) &&
             ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) &&
             ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) &&
             ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) &&
             ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
    }

    bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID                        = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID                        = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID                        = {};
    uint32_t                                                    deviceNodeMask                    = {};
    VULKAN_HPP_NAMESPACE::Bool32                                deviceLUIDValid                   = {};
    uint32_t                                                    subgroupSize                      = {};
    VULKAN_HPP_NAMESPACE::ShaderStageFlags                      subgroupSupportedStages           = {};
    VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags                  subgroupSupportedOperations       = {};
    VULKAN_HPP_NAMESPACE::Bool32                                subgroupQuadOperationsInAllStages = {};
    VULKAN_HPP_NAMESPACE::PointClippingBehavior                 pointClippingBehavior =
      VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
    uint32_t                         maxMultiviewViewCount     = {};
    uint32_t                         maxMultiviewInstanceIndex = {};
    VULKAN_HPP_NAMESPACE::Bool32     protectedNoFault          = {};
    uint32_t                         maxPerSetDescriptors      = {};
    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize   = {};
  };
  static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Properties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
  {
    using Type = PhysicalDeviceVulkan11Properties;
  };

  struct PhysicalDeviceVulkan12Features
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(
      VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_                           = {},
      VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_                                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_                            = {},
      VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_                               = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_                           = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_                           = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_                                      = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_                                         = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_                                 = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_         = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_         = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_    = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_    = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_       = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_          = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_                    = {},
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_           = {},
      VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_                             = {},
      VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_                                = {},
      VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_                                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_                               = {},
      VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_                        = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_                        = {},
      VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_                        = {},
      VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_                                     = {},
      VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_                                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_                                = {},
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_                     = {},
      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_                                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_                       = {},
      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_      = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_                          = {},
      VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_                                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_                         = {} ) VULKAN_HPP_NOEXCEPT
      : samplerMirrorClampToEdge( samplerMirrorClampToEdge_ )
      , drawIndirectCount( drawIndirectCount_ )
      , storageBuffer8BitAccess( storageBuffer8BitAccess_ )
      , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
      , storagePushConstant8( storagePushConstant8_ )
      , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ )
      , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
      , shaderFloat16( shaderFloat16_ )
      , shaderInt8( shaderInt8_ )
      , descriptorIndexing( descriptorIndexing_ )
      , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
      , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
      , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
      , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
      , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
      , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
      , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
      , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
      , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
      , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
      , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
      , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
      , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
      , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
      , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
      , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
      , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
      , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
      , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
      , runtimeDescriptorArray( runtimeDescriptorArray_ )
      , samplerFilterMinmax( samplerFilterMinmax_ )
      , scalarBlockLayout( scalarBlockLayout_ )
      , imagelessFramebuffer( imagelessFramebuffer_ )
      , uniformBufferStandardLayout( uniformBufferStandardLayout_ )
      , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
      , separateDepthStencilLayouts( separateDepthStencilLayouts_ )
      , hostQueryReset( hostQueryReset_ )
      , timelineSemaphore( timelineSemaphore_ )
      , bufferDeviceAddress( bufferDeviceAddress_ )
      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
      , vulkanMemoryModel( vulkanMemoryModel_ )
      , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
      , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
      , shaderOutputViewportIndex( shaderOutputViewportIndex_ )
      , shaderOutputLayer( shaderOutputLayer_ )
      , subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features &
                            operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
      return *this;
    }

    PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
    {
      drawIndirectCount = drawIndirectCount_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      storageBuffer8BitAccess = storageBuffer8BitAccess_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess(
      VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
    {
      storagePushConstant8 = storagePushConstant8_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderFloat16 = shaderFloat16_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInt8 = shaderInt8_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorIndexing = descriptorIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing(
      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount(
      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
    {
      runtimeDescriptorArray = runtimeDescriptorArray_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerFilterMinmax = samplerFilterMinmax_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      scalarBlockLayout = scalarBlockLayout_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
    {
      imagelessFramebuffer = imagelessFramebuffer_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      uniformBufferStandardLayout = uniformBufferStandardLayout_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
    {
      separateDepthStencilLayouts = separateDepthStencilLayouts_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
    {
      hostQueryReset = hostQueryReset_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
    {
      timelineSemaphore = timelineSemaphore_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddress = bufferDeviceAddress_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay(
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice(
      VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
    {
      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModel = vulkanMemoryModel_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
      return *this;
    }

    PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains(
      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderOutputViewportIndex = shaderOutputViewportIndex_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderOutputLayer = shaderOutputLayer_;
      return *this;
    }

    PhysicalDeviceVulkan12Features &
      setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
    {
      subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
      return *this;
    }

    operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features *>( this );
    }

    operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan12Features *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) &&
             ( drawIndirectCount == rhs.drawIndirectCount ) &&
             ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) &&
             ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) &&
             ( storagePushConstant8 == rhs.storagePushConstant8 ) &&
             ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) &&
             ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) && ( shaderFloat16 == rhs.shaderFloat16 ) &&
             ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) &&
             ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) &&
             ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) &&
             ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) &&
             ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) &&
             ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) &&
             ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
             ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
             ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
             ( shaderUniformTexelBufferArrayNonUniformIndexing ==
               rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
             ( shaderStorageTexelBufferArrayNonUniformIndexing ==
               rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
             ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
             ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
             ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
             ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
             ( descriptorBindingUniformTexelBufferUpdateAfterBind ==
               rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
             ( descriptorBindingStorageTexelBufferUpdateAfterBind ==
               rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
             ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
             ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
             ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
             ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) &&
             ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && ( scalarBlockLayout == rhs.scalarBlockLayout ) &&
             ( imagelessFramebuffer == rhs.imagelessFramebuffer ) &&
             ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) &&
             ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) &&
             ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) &&
             ( hostQueryReset == rhs.hostQueryReset ) && ( timelineSemaphore == rhs.timelineSemaphore ) &&
             ( bufferDeviceAddress == rhs.bufferDeviceAddress ) &&
             ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) &&
             ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) &&
             ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
             ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) &&
             ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) &&
             ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) &&
             ( shaderOutputLayer == rhs.shaderOutputLayer ) &&
             ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
    }

    bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                    = StructureType::ePhysicalDeviceVulkan12Features;
    void *                              pNext                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        samplerMirrorClampToEdge = {};
    VULKAN_HPP_NAMESPACE::Bool32        drawIndirectCount        = {};
    VULKAN_HPP_NAMESPACE::Bool32        storageBuffer8BitAccess  = {};
    VULKAN_HPP_NAMESPACE::Bool32        uniformAndStorageBuffer8BitAccess                  = {};
    VULKAN_HPP_NAMESPACE::Bool32        storagePushConstant8                               = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderBufferInt64Atomics                           = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSharedInt64Atomics                           = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderFloat16                                      = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderInt8                                         = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorIndexing                                 = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderInputAttachmentArrayDynamicIndexing          = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformTexelBufferArrayDynamicIndexing       = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageTexelBufferArrayDynamicIndexing       = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformBufferArrayNonUniformIndexing         = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSampledImageArrayNonUniformIndexing          = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageBufferArrayNonUniformIndexing         = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageImageArrayNonUniformIndexing          = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderInputAttachmentArrayNonUniformIndexing       = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderUniformTexelBufferArrayNonUniformIndexing    = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderStorageTexelBufferArrayNonUniformIndexing    = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingUniformBufferUpdateAfterBind      = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingSampledImageUpdateAfterBind       = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingStorageImageUpdateAfterBind       = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingStorageBufferUpdateAfterBind      = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingUniformTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingStorageTexelBufferUpdateAfterBind = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingUpdateUnusedWhilePending          = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingPartiallyBound                    = {};
    VULKAN_HPP_NAMESPACE::Bool32        descriptorBindingVariableDescriptorCount           = {};
    VULKAN_HPP_NAMESPACE::Bool32        runtimeDescriptorArray                             = {};
    VULKAN_HPP_NAMESPACE::Bool32        samplerFilterMinmax                                = {};
    VULKAN_HPP_NAMESPACE::Bool32        scalarBlockLayout                                  = {};
    VULKAN_HPP_NAMESPACE::Bool32        imagelessFramebuffer                               = {};
    VULKAN_HPP_NAMESPACE::Bool32        uniformBufferStandardLayout                        = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderSubgroupExtendedTypes                        = {};
    VULKAN_HPP_NAMESPACE::Bool32        separateDepthStencilLayouts                        = {};
    VULKAN_HPP_NAMESPACE::Bool32        hostQueryReset                                     = {};
    VULKAN_HPP_NAMESPACE::Bool32        timelineSemaphore                                  = {};
    VULKAN_HPP_NAMESPACE::Bool32        bufferDeviceAddress                                = {};
    VULKAN_HPP_NAMESPACE::Bool32        bufferDeviceAddressCaptureReplay                   = {};
    VULKAN_HPP_NAMESPACE::Bool32        bufferDeviceAddressMultiDevice                     = {};
    VULKAN_HPP_NAMESPACE::Bool32        vulkanMemoryModel                                  = {};
    VULKAN_HPP_NAMESPACE::Bool32        vulkanMemoryModelDeviceScope                       = {};
    VULKAN_HPP_NAMESPACE::Bool32        vulkanMemoryModelAvailabilityVisibilityChains      = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderOutputViewportIndex                          = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderOutputLayer                                  = {};
    VULKAN_HPP_NAMESPACE::Bool32        subgroupBroadcastDynamicId                         = {};
  };
  static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Features>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
  {
    using Type = PhysicalDeviceVulkan12Features;
  };
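
  // Illustrative usage sketch (not part of the generated API): PhysicalDeviceVulkan12Features is
  // typically zero-initialized, selectively enabled through the chained setters above, and then
  // hooked into the pNext chain of vk::DeviceCreateInfo before creating the logical device.
  // The features enabled below are placeholders; query support first (e.g. via
  // PhysicalDevice::getFeatures2) before turning anything on.
  //
  //   vk::PhysicalDeviceVulkan12Features features12;
  //   features12.setDescriptorIndexing( VK_TRUE )
  //             .setTimelineSemaphore( VK_TRUE )
  //             .setBufferDeviceAddress( VK_TRUE );
  //   vk::DeviceCreateInfo deviceCreateInfo;     // queue and extension setup omitted for brevity
  //   deviceCreateInfo.setPNext( &features12 );  // consumed by vkCreateDevice via the pNext chain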

  struct PhysicalDeviceVulkan12Properties
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(
      VULKAN_HPP_NAMESPACE::DriverId                        driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary,
      std::array<char, VK_MAX_DRIVER_NAME_SIZE> const &     driverName_         = {},
      std::array<char, VK_MAX_DRIVER_INFO_SIZE> const &     driverInfo_         = {},
      VULKAN_HPP_NAMESPACE::ConformanceVersion              conformanceVersion_ = {},
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ =
        VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ =
        VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly,
      VULKAN_HPP_NAMESPACE::Bool32           shaderSignedZeroInfNanPreserveFloat16_                = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderSignedZeroInfNanPreserveFloat32_                = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderSignedZeroInfNanPreserveFloat64_                = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderDenormPreserveFloat16_                          = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderDenormPreserveFloat32_                          = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderDenormPreserveFloat64_                          = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderDenormFlushToZeroFloat16_                       = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderDenormFlushToZeroFloat32_                       = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderDenormFlushToZeroFloat64_                       = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTEFloat16_                         = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTEFloat32_                         = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTEFloat64_                         = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTZFloat16_                         = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTZFloat32_                         = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTZFloat64_                         = {},
      uint32_t                               maxUpdateAfterBindDescriptorsInAllPools_              = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderUniformBufferArrayNonUniformIndexingNative_     = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderSampledImageArrayNonUniformIndexingNative_      = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderStorageBufferArrayNonUniformIndexingNative_     = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderStorageImageArrayNonUniformIndexingNative_      = {},
      VULKAN_HPP_NAMESPACE::Bool32           shaderInputAttachmentArrayNonUniformIndexingNative_   = {},
      VULKAN_HPP_NAMESPACE::Bool32           robustBufferAccessUpdateAfterBind_                    = {},
      VULKAN_HPP_NAMESPACE::Bool32           quadDivergentImplicitLod_                             = {},
      uint32_t                               maxPerStageDescriptorUpdateAfterBindSamplers_         = {},
      uint32_t                               maxPerStageDescriptorUpdateAfterBindUniformBuffers_   = {},
      uint32_t                               maxPerStageDescriptorUpdateAfterBindStorageBuffers_   = {},
      uint32_t                               maxPerStageDescriptorUpdateAfterBindSampledImages_    = {},
      uint32_t                               maxPerStageDescriptorUpdateAfterBindStorageImages_    = {},
      uint32_t                               maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
      uint32_t                               maxPerStageUpdateAfterBindResources_                  = {},
      uint32_t                               maxDescriptorSetUpdateAfterBindSamplers_              = {},
      uint32_t                               maxDescriptorSetUpdateAfterBindUniformBuffers_        = {},
      uint32_t                               maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
      uint32_t                               maxDescriptorSetUpdateAfterBindStorageBuffers_        = {},
      uint32_t                               maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
      uint32_t                               maxDescriptorSetUpdateAfterBindSampledImages_         = {},
      uint32_t                               maxDescriptorSetUpdateAfterBindStorageImages_         = {},
      uint32_t                               maxDescriptorSetUpdateAfterBindInputAttachments_      = {},
      VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_                           = {},
      VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_                         = {},
      VULKAN_HPP_NAMESPACE::Bool32           independentResolveNone_                               = {},
      VULKAN_HPP_NAMESPACE::Bool32           independentResolve_                                   = {},
      VULKAN_HPP_NAMESPACE::Bool32           filterMinmaxSingleComponentFormats_                   = {},
      VULKAN_HPP_NAMESPACE::Bool32           filterMinmaxImageComponentMapping_                    = {},
      uint64_t                               maxTimelineSemaphoreValueDifference_                  = {},
      VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {} ) VULKAN_HPP_NOEXCEPT
      : driverID( driverID_ )
      , driverName( driverName_ )
      , driverInfo( driverInfo_ )
      , conformanceVersion( conformanceVersion_ )
      , denormBehaviorIndependence( denormBehaviorIndependence_ )
      , roundingModeIndependence( roundingModeIndependence_ )
      , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
      , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
      , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
      , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
      , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
      , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
      , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
      , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
      , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
      , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
      , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
      , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
      , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
      , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
      , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
      , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
      , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
      , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
      , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
      , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
      , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
      , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
      , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
      , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
      , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
      , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
      , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
      , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
      , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
      , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
      , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
      , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
      , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
      , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
      , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
      , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
      , supportedDepthResolveModes( supportedDepthResolveModes_ )
      , supportedStencilResolveModes( supportedStencilResolveModes_ )
      , independentResolveNone( independentResolveNone_ )
      , independentResolve( independentResolve_ )
      , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ )
      , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
      , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
      , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties &
                            operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
      return *this;
    }

    operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties *>( this );
    }

    operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkan12Properties const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) &&
             ( driverName == rhs.driverName ) && ( driverInfo == rhs.driverInfo ) &&
             ( conformanceVersion == rhs.conformanceVersion ) &&
             ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) &&
             ( roundingModeIndependence == rhs.roundingModeIndependence ) &&
             ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) &&
             ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) &&
             ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) &&
             ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) &&
             ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) &&
             ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) &&
             ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) &&
             ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) &&
             ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) &&
             ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) &&
             ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) &&
             ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) &&
             ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) &&
             ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) &&
             ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) &&
             ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) &&
             ( shaderUniformBufferArrayNonUniformIndexingNative ==
               rhs.shaderUniformBufferArrayNonUniformIndexingNative ) &&
             ( shaderSampledImageArrayNonUniformIndexingNative ==
               rhs.shaderSampledImageArrayNonUniformIndexingNative ) &&
             ( shaderStorageBufferArrayNonUniformIndexingNative ==
               rhs.shaderStorageBufferArrayNonUniformIndexingNative ) &&
             ( shaderStorageImageArrayNonUniformIndexingNative ==
               rhs.shaderStorageImageArrayNonUniformIndexingNative ) &&
             ( shaderInputAttachmentArrayNonUniformIndexingNative ==
               rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) &&
             ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) &&
             ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) &&
             ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) &&
             ( maxPerStageDescriptorUpdateAfterBindUniformBuffers ==
               rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageBuffers ==
               rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) &&
             ( maxPerStageDescriptorUpdateAfterBindSampledImages ==
               rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindStorageImages ==
               rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) &&
             ( maxPerStageDescriptorUpdateAfterBindInputAttachments ==
               rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) &&
             ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) &&
             ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ==
               rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) &&
             ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ==
               rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) &&
             ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) &&
             ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) &&
             ( maxDescriptorSetUpdateAfterBindInputAttachments ==
               rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) &&
             ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) &&
             ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) &&
             ( independentResolveNone == rhs.independentResolveNone ) &&
             ( independentResolve == rhs.independentResolve ) &&
             ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) &&
             ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) &&
             ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) &&
             ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
    }

    bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::ePhysicalDeviceVulkan12Properties;
    void *                              pNext    = {};
    VULKAN_HPP_NAMESPACE::DriverId      driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName         = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo         = {};
    VULKAN_HPP_NAMESPACE::ConformanceVersion                            conformanceVersion = {};
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence               denormBehaviorIndependence =
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence =
      VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
    VULKAN_HPP_NAMESPACE::Bool32           shaderSignedZeroInfNanPreserveFloat16                = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderSignedZeroInfNanPreserveFloat32                = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderSignedZeroInfNanPreserveFloat64                = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderDenormPreserveFloat16                          = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderDenormPreserveFloat32                          = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderDenormPreserveFloat64                          = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderDenormFlushToZeroFloat16                       = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderDenormFlushToZeroFloat32                       = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderDenormFlushToZeroFloat64                       = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTEFloat16                         = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTEFloat32                         = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTEFloat64                         = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTZFloat16                         = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTZFloat32                         = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderRoundingModeRTZFloat64                         = {};
    uint32_t                               maxUpdateAfterBindDescriptorsInAllPools              = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderUniformBufferArrayNonUniformIndexingNative     = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderSampledImageArrayNonUniformIndexingNative      = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderStorageBufferArrayNonUniformIndexingNative     = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderStorageImageArrayNonUniformIndexingNative      = {};
    VULKAN_HPP_NAMESPACE::Bool32           shaderInputAttachmentArrayNonUniformIndexingNative   = {};
    VULKAN_HPP_NAMESPACE::Bool32           robustBufferAccessUpdateAfterBind                    = {};
    VULKAN_HPP_NAMESPACE::Bool32           quadDivergentImplicitLod                             = {};
    uint32_t                               maxPerStageDescriptorUpdateAfterBindSamplers         = {};
    uint32_t                               maxPerStageDescriptorUpdateAfterBindUniformBuffers   = {};
    uint32_t                               maxPerStageDescriptorUpdateAfterBindStorageBuffers   = {};
    uint32_t                               maxPerStageDescriptorUpdateAfterBindSampledImages    = {};
    uint32_t                               maxPerStageDescriptorUpdateAfterBindStorageImages    = {};
    uint32_t                               maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
    uint32_t                               maxPerStageUpdateAfterBindResources                  = {};
    uint32_t                               maxDescriptorSetUpdateAfterBindSamplers              = {};
    uint32_t                               maxDescriptorSetUpdateAfterBindUniformBuffers        = {};
    uint32_t                               maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
    uint32_t                               maxDescriptorSetUpdateAfterBindStorageBuffers        = {};
    uint32_t                               maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
    uint32_t                               maxDescriptorSetUpdateAfterBindSampledImages         = {};
    uint32_t                               maxDescriptorSetUpdateAfterBindStorageImages         = {};
    uint32_t                               maxDescriptorSetUpdateAfterBindInputAttachments      = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes                           = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes                         = {};
    VULKAN_HPP_NAMESPACE::Bool32           independentResolveNone                               = {};
    VULKAN_HPP_NAMESPACE::Bool32           independentResolve                                   = {};
    VULKAN_HPP_NAMESPACE::Bool32           filterMinmaxSingleComponentFormats                   = {};
    VULKAN_HPP_NAMESPACE::Bool32           filterMinmaxImageComponentMapping                    = {};
    uint64_t                               maxTimelineSemaphoreValueDifference                  = {};
    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts                  = {};
  };
  static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Properties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
  {
    using Type = PhysicalDeviceVulkan12Properties;
  };
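
  // Illustrative usage sketch (not part of the generated API): PhysicalDeviceVulkan12Properties is
  // a query-only structure. It is normally retrieved by chaining it behind
  // vk::PhysicalDeviceProperties2, assuming the StructureChain-based getProperties2 overload of
  // PhysicalDevice declared elsewhere in this header:
  //
  //   auto chain   = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                                vk::PhysicalDeviceVulkan12Properties>();
  //   auto & props = chain.get<vk::PhysicalDeviceVulkan12Properties>();
  //   // props.driverName, props.maxTimelineSemaphoreValueDifference, etc. are now populated.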

  struct PhysicalDeviceVulkanMemoryModelFeatures
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(
      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_                             = {},
      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT
      : vulkanMemoryModel( vulkanMemoryModel_ )
      , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ )
      , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceVulkanMemoryModelFeatures(
          *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures &
                            operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceVulkanMemoryModelFeatures &
      operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
      return *this;
    }

    PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceVulkanMemoryModelFeatures &
      setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModel = vulkanMemoryModel_;
      return *this;
    }

    PhysicalDeviceVulkanMemoryModelFeatures &
      setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
      return *this;
    }

    PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains(
      VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
    {
      vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
      return *this;
    }

    operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
    }

    operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default;
#else
    bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) &&
             ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) &&
             ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
    }

    bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
    void *                              pNext             = {};
    VULKAN_HPP_NAMESPACE::Bool32        vulkanMemoryModel = {};
    VULKAN_HPP_NAMESPACE::Bool32        vulkanMemoryModelDeviceScope                  = {};
    VULKAN_HPP_NAMESPACE::Bool32        vulkanMemoryModelAvailabilityVisibilityChains = {};
  };
  static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) ==
                   sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeatures>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
  {
    using Type = PhysicalDeviceVulkanMemoryModelFeatures;
  };
  using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
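
  // Illustrative usage sketch (not part of the generated API): like the other feature structures,
  // PhysicalDeviceVulkanMemoryModelFeatures can be chained behind vk::PhysicalDeviceFeatures2 to
  // query support before device creation, assuming the StructureChain-based getFeatures2 overload
  // of PhysicalDevice declared elsewhere in this header:
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceVulkanMemoryModelFeatures>();
  //   if ( chain.get<vk::PhysicalDeviceVulkanMemoryModelFeatures>().vulkanMemoryModel )
  //   {
  //     // the Vulkan memory model is supported and may be enabled at device creation
  //   }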

  struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_                  = {},
      VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_        = {},
      VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_       = {} ) VULKAN_HPP_NOEXCEPT
      : workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ )
      , workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ )
      , workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ )
      , workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(
      PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(
      VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(
          *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
                            operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &
      operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout(
      VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_;
      return *this;
    }

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayoutScalarBlockLayout(
      VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_;
      return *this;
    }

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout8BitAccess(
      VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_;
      return *this;
    }

    PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout16BitAccess(
      VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT
    {
      workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_;
      return *this;
    }

    operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) &&
             ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) &&
             ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) &&
             ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess );
    }

    bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        workgroupMemoryExplicitLayout                  = {};
    VULKAN_HPP_NAMESPACE::Bool32        workgroupMemoryExplicitLayoutScalarBlockLayout = {};
    VULKAN_HPP_NAMESPACE::Bool32        workgroupMemoryExplicitLayout8BitAccess        = {};
    VULKAN_HPP_NAMESPACE::Bool32        workgroupMemoryExplicitLayout16BitAccess       = {};
  };
  static_assert( sizeof( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) ==
                   sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
  {
    using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
  };
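
  // Illustrative usage sketch, not part of the generated API: enable the workgroup memory explicit
  // layout features at device creation by chaining the struct into an assumed, already populated
  // VULKAN_HPP_NAMESPACE::DeviceCreateInfo named `deviceCreateInfo`.
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR explicitLayoutFeatures;
  //   explicitLayoutFeatures.setWorkgroupMemoryExplicitLayout( VK_TRUE )
  //     .setWorkgroupMemoryExplicitLayout16BitAccess( VK_TRUE );
  //   deviceCreateInfo.setPNext( &explicitLayoutFeatures );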

  struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {} ) VULKAN_HPP_NOEXCEPT
      : ycbcr2plane444Formats( ycbcr2plane444Formats_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(
      PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &
                            operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &
      operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &
      setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcr2plane444Formats = ycbcr2plane444Formats_;
      return *this;
    }

    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats );
    }

    bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        ycbcr2plane444Formats = {};
  };
  static_assert( sizeof( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
  };

  struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(
      VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT : ycbcrImageArrays( ycbcrImageArrays_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(
      PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceYcbcrImageArraysFeaturesEXT(
          *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT &
                            operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceYcbcrImageArraysFeaturesEXT &
      operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
      return *this;
    }

    PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceYcbcrImageArraysFeaturesEXT &
      setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
    {
      ycbcrImageArrays = ycbcrImageArrays_;
      return *this;
    }

    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
    }

    operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default;
#else
    bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
    }

    bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType            = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
    void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::Bool32        ycbcrImageArrays = {};
  };
  static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) ==
                   sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
  {
    using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
  };

  struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR(
      VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
    {}

    VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR(
      PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR(
      VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR(
          *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR &
                            operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR &
      operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const *>( &rhs );
      return *this;
    }

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & setShaderZeroInitializeWorkgroupMemory(
      VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
      return *this;
    }

    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR *>( this );
    }

    operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & ) const = default;
#else
    bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
    }

    bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shaderZeroInitializeWorkgroupMemory = {};
  };
  static_assert( sizeof( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR ) ==
                   sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR>
  {
    using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;
  };

  struct PipelineColorBlendAdvancedStateCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::Bool32          srcPremultiplied_ = {},
      VULKAN_HPP_NAMESPACE::Bool32          dstPremultiplied_ = {},
      VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_     = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated )
      VULKAN_HPP_NOEXCEPT
      : srcPremultiplied( srcPremultiplied_ )
      , dstPremultiplied( dstPremultiplied_ )
      , blendOverlap( blendOverlap_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(
      PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendAdvancedStateCreateInfoEXT(
          *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT &
                            operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorBlendAdvancedStateCreateInfoEXT &
      operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineColorBlendAdvancedStateCreateInfoEXT &
      setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
    {
      srcPremultiplied = srcPremultiplied_;
      return *this;
    }

    PipelineColorBlendAdvancedStateCreateInfoEXT &
      setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
    {
      dstPremultiplied = dstPremultiplied_;
      return *this;
    }

    PipelineColorBlendAdvancedStateCreateInfoEXT &
      setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
    {
      blendOverlap = blendOverlap_;
      return *this;
    }

    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
    }

    operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) &&
             ( dstPremultiplied == rhs.dstPremultiplied ) && ( blendOverlap == rhs.blendOverlap );
    }

    bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
    const void *                          pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32          srcPremultiplied = {};
    VULKAN_HPP_NAMESPACE::Bool32          dstPremultiplied = {};
    VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap     = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
  };
  static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) ==
                   sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineColorBlendAdvancedStateCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
  {
    using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
  };
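
  // Illustrative usage sketch, not part of the generated API: configure advanced blending through the
  // fluent setters above and chain the struct into an assumed
  // VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo named `colorBlendState`.
  //
  //   VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT advancedBlendState;
  //   advancedBlendState.setSrcPremultiplied( VK_TRUE )
  //     .setDstPremultiplied( VK_TRUE )
  //     .setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eDisjoint );
  //   colorBlendState.setPNext( &advancedBlendState );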

  struct PipelineColorWriteCreateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT(
      uint32_t                             attachmentCount_    = {},
      const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {} ) VULKAN_HPP_NOEXCEPT
      : attachmentCount( attachmentCount_ )
      , pColorWriteEnables( pColorWriteEnables_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorWriteCreateInfoEXT( *reinterpret_cast<PipelineColorWriteCreateInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineColorWriteCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ )
      : attachmentCount( static_cast<uint32_t>( colorWriteEnables_.size() ) )
      , pColorWriteEnables( colorWriteEnables_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT &
                            operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    PipelineColorWriteCreateInfoEXT &
      setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT
    {
      pColorWriteEnables = pColorWriteEnables_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineColorWriteCreateInfoEXT & setColorWriteEnables(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables_ )
      VULKAN_HPP_NOEXCEPT
    {
      attachmentCount    = static_cast<uint32_t>( colorWriteEnables_.size() );
      pColorWriteEnables = colorWriteEnables_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineColorWriteCreateInfoEXT *>( this );
    }

    operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineColorWriteCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) &&
             ( pColorWriteEnables == rhs.pColorWriteEnables );
    }

    bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType  sType              = StructureType::ePipelineColorWriteCreateInfoEXT;
    const void *                         pNext              = {};
    uint32_t                             attachmentCount    = {};
    const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {};
  };
  static_assert( sizeof( PipelineColorWriteCreateInfoEXT ) == sizeof( VkPipelineColorWriteCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineColorWriteCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineColorWriteCreateInfoEXT>
  {
    using Type = PipelineColorWriteCreateInfoEXT;
  };
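
  // Illustrative usage sketch, not part of the generated API: with enhanced mode enabled, the
  // ArrayProxy constructor fills attachmentCount and pColorWriteEnables from a single container.
  // `colorBlendState` is an assumed VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::Bool32, 2> colorWriteEnables = { VK_TRUE, VK_FALSE };
  //   VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT colorWriteInfo( colorWriteEnables );
  //   colorBlendState.setPNext( &colorWriteInfo );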

  struct PipelineCompilerControlCreateInfoAMD
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineCompilerControlCreateInfoAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(
      VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT
      : compilerControlFlags( compilerControlFlags_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD &
                            operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCompilerControlCreateInfoAMD &
      operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>( &rhs );
      return *this;
    }

    PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags(
      VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      compilerControlFlags = compilerControlFlags_;
      return *this;
    }

    operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD *>( this );
    }

    operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default;
#else
    bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags );
    }

    bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                   sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
    const void *                                          pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
  };
  static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCompilerControlCreateInfoAMD>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
  {
    using Type = PipelineCompilerControlCreateInfoAMD;
  };
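
  // Illustrative usage sketch, not part of the generated API: chain the AMD compiler control struct
  // into an assumed VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo named `graphicsPipelineCreateInfo`,
  // leaving compilerControlFlags at its default value.
  //
  //   VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD compilerControl;
  //   graphicsPipelineCreateInfo.setPNext( &compilerControl );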

  struct PipelineCoverageModulationStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineCoverageModulationStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {},
      VULKAN_HPP_NAMESPACE::CoverageModulationModeNV                     coverageModulationMode_ =
        VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone,
      VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {},
      uint32_t                     coverageModulationTableCount_  = {},
      const float *                pCoverageModulationTable_      = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , coverageModulationMode( coverageModulationMode_ )
      , coverageModulationTableEnable( coverageModulationTableEnable_ )
      , coverageModulationTableCount( coverageModulationTableCount_ )
      , pCoverageModulationTable( pCoverageModulationTable_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(
      PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineCoverageModulationStateCreateInfoNV(
          *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineCoverageModulationStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_,
      VULKAN_HPP_NAMESPACE::CoverageModulationModeNV                     coverageModulationMode_,
      VULKAN_HPP_NAMESPACE::Bool32                                       coverageModulationTableEnable_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ )
      : flags( flags_ )
      , coverageModulationMode( coverageModulationMode_ )
      , coverageModulationTableEnable( coverageModulationTableEnable_ )
      , coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) )
      , pCoverageModulationTable( coverageModulationTable_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV &
                            operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCoverageModulationStateCreateInfoNV &
      operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineCoverageModulationStateCreateInfoNV &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode(
      VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationMode = coverageModulationMode_;
      return *this;
    }

    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable(
      VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationTableEnable = coverageModulationTableEnable_;
      return *this;
    }

    PipelineCoverageModulationStateCreateInfoNV &
      setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationTableCount = coverageModulationTableCount_;
      return *this;
    }

    PipelineCoverageModulationStateCreateInfoNV &
      setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
    {
      pCoverageModulationTable = pCoverageModulationTable_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
      pCoverageModulationTable     = coverageModulationTable_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( coverageModulationMode == rhs.coverageModulationMode ) &&
             ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) &&
             ( coverageModulationTableCount == rhs.coverageModulationTableCount ) &&
             ( pCoverageModulationTable == rhs.pCoverageModulationTable );
    }

    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
    VULKAN_HPP_NAMESPACE::CoverageModulationModeNV                     coverageModulationMode =
      VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
    VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
    uint32_t                     coverageModulationTableCount  = {};
    const float *                pCoverageModulationTable      = {};
  };
  static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) ==
                   sizeof( VkPipelineCoverageModulationStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCoverageModulationStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
  {
    using Type = PipelineCoverageModulationStateCreateInfoNV;
  };
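
  // Illustrative usage sketch, not part of the generated API: with enhanced mode enabled, the
  // ArrayProxy constructor derives coverageModulationTableCount and pCoverageModulationTable from the
  // table container. `multisampleState` is an assumed VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo.
  //
  //   std::array<float, 4> modulationTable = { 0.25f, 0.5f, 0.75f, 1.0f };
  //   VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV coverageModulation(
  //     {}, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eRgba, VK_TRUE, modulationTable );
  //   multisampleState.setPNext( &coverageModulation );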

  struct PipelineCoverageReductionStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineCoverageReductionStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
      VULKAN_HPP_NAMESPACE::CoverageReductionModeNV                     coverageReductionMode_ =
        VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , coverageReductionMode( coverageReductionMode_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(
      PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineCoverageReductionStateCreateInfoNV(
          *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV &
                            operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCoverageReductionStateCreateInfoNV &
      operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineCoverageReductionStateCreateInfoNV &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode(
      VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageReductionMode = coverageReductionMode_;
      return *this;
    }

    operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( coverageReductionMode == rhs.coverageReductionMode );
    }

    bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV                     coverageReductionMode =
      VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
  };
  static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) ==
                   sizeof( VkPipelineCoverageReductionStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCoverageReductionStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
  {
    using Type = PipelineCoverageReductionStateCreateInfoNV;
  };

  struct PipelineCoverageToColorStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineCoverageToColorStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_                 = {},
      VULKAN_HPP_NAMESPACE::Bool32                                    coverageToColorEnable_ = {},
      uint32_t coverageToColorLocation_                                                      = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , coverageToColorEnable( coverageToColorEnable_ )
      , coverageToColorLocation( coverageToColorLocation_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(
      PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineCoverageToColorStateCreateInfoNV(
          *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV &
                            operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCoverageToColorStateCreateInfoNV &
      operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineCoverageToColorStateCreateInfoNV &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineCoverageToColorStateCreateInfoNV &
      setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageToColorEnable = coverageToColorEnable_;
      return *this;
    }

    PipelineCoverageToColorStateCreateInfoNV &
      setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageToColorLocation = coverageToColorLocation_;
      return *this;
    }

    operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV *>( this );
    }

    operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( coverageToColorEnable == rhs.coverageToColorEnable ) &&
             ( coverageToColorLocation == rhs.coverageToColorLocation );
    }

    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags                   = {};
    VULKAN_HPP_NAMESPACE::Bool32                                    coverageToColorEnable   = {};
    uint32_t                                                        coverageToColorLocation = {};
  };
  static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) ==
                   sizeof( VkPipelineCoverageToColorStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCoverageToColorStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
  {
    using Type = PipelineCoverageToColorStateCreateInfoNV;
  };
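
  // Illustrative usage sketch, not part of the generated API: route coverage to a fragment shader
  // output location by chaining into an assumed VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo
  // named `multisampleState`.
  //
  //   VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV coverageToColor;
  //   coverageToColor.setCoverageToColorEnable( VK_TRUE ).setCoverageToColorLocation( 0 );
  //   multisampleState.setPNext( &coverageToColor );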

  struct PipelineCreationFeedbackEXT
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {},
                                   uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , duration( duration_ )
    {}

    VULKAN_HPP_CONSTEXPR
      PipelineCreationFeedbackEXT( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreationFeedbackEXT( *reinterpret_cast<PipelineCreationFeedbackEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackEXT &
                            operator=( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCreationFeedbackEXT & operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT const *>( &rhs );
      return *this;
    }

    operator VkPipelineCreationFeedbackEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreationFeedbackEXT *>( this );
    }

    operator VkPipelineCreationFeedbackEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreationFeedbackEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCreationFeedbackEXT const & ) const = default;
#else
    bool operator==( PipelineCreationFeedbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( flags == rhs.flags ) && ( duration == rhs.duration );
    }

    bool operator!=( PipelineCreationFeedbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags    = {};
    uint64_t                                               duration = {};
  };
  static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCreationFeedbackEXT>::value,
                 "struct wrapper is not a standard layout!" );

  struct PipelineCreationFeedbackCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineCreationFeedbackCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_          = {},
      uint32_t                                            pipelineStageCreationFeedbackCount_ = {},
      VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks_    = {} ) VULKAN_HPP_NOEXCEPT
      : pPipelineCreationFeedback( pPipelineCreationFeedback_ )
      , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ )
      , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( PipelineCreationFeedbackCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreationFeedbackCreateInfoEXT(
          *reinterpret_cast<PipelineCreationFeedbackCreateInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineCreationFeedbackCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT> const &
        pipelineStageCreationFeedbacks_ )
      : pPipelineCreationFeedback( pPipelineCreationFeedback_ )
      , pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) )
      , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfoEXT &
                            operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineCreationFeedbackCreateInfoEXT &
      operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback(
      VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineCreationFeedback = pPipelineCreationFeedback_;
      return *this;
    }

    PipelineCreationFeedbackCreateInfoEXT &
      setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
      return *this;
    }

    PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks(
      VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbacks(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT> const &
        pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
      pPipelineStageCreationFeedbacks    = pipelineStageCreationFeedbacks_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfoEXT *>( this );
    }

    operator VkPipelineCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) &&
             ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) &&
             ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
    }

    bool operator!=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
    const void *                                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineCreationFeedback          = {};
    uint32_t                                            pipelineStageCreationFeedbackCount = {};
    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT * pPipelineStageCreationFeedbacks    = {};
  };
  static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCreationFeedbackCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfoEXT>
  {
    using Type = PipelineCreationFeedbackCreateInfoEXT;
  };
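
  // Illustrative usage sketch, not part of the generated API: collect creation feedback for a pipeline
  // and its stages by chaining into an assumed VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo named
  // `graphicsPipelineCreateInfo` (assumed here to reference two shader stages). After creation, check
  // PipelineCreationFeedbackFlagBitsEXT::eValid before reading the durations (reported in nanoseconds).
  //
  //   VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT                pipelineFeedback;
  //   std::array<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT, 2> stageFeedbacks;
  //   VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT feedbackInfo( &pipelineFeedback, stageFeedbacks );
  //   graphicsPipelineCreateInfo.setPNext( &feedbackInfo );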

  struct PipelineDiscardRectangleStateCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {},
      VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT                     discardRectangleMode_ =
        VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive,
      uint32_t                             discardRectangleCount_ = {},
      const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_    = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , discardRectangleMode( discardRectangleMode_ )
      , discardRectangleCount( discardRectangleCount_ )
      , pDiscardRectangles( pDiscardRectangles_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(
      PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineDiscardRectangleStateCreateInfoEXT(
          *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineDiscardRectangleStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT                         flags_,
      VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT                                             discardRectangleMode_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ )
      : flags( flags_ )
      , discardRectangleMode( discardRectangleMode_ )
      , discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) )
      , pDiscardRectangles( discardRectangles_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT &
                            operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineDiscardRectangleStateCreateInfoEXT &
      operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineDiscardRectangleStateCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineDiscardRectangleStateCreateInfoEXT &
      setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleMode = discardRectangleMode_;
      return *this;
    }

    PipelineDiscardRectangleStateCreateInfoEXT &
      setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleCount = discardRectangleCount_;
      return *this;
    }

    PipelineDiscardRectangleStateCreateInfoEXT &
      setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      pDiscardRectangles = pDiscardRectangles_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ )
      VULKAN_HPP_NOEXCEPT
    {
      discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
      pDiscardRectangles    = discardRectangles_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
    }

    operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( discardRectangleMode == rhs.discardRectangleMode ) &&
             ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles );
    }

    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT                     discardRectangleMode =
      VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
    uint32_t                             discardRectangleCount = {};
    const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles    = {};
  };
  static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) ==
                   sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineDiscardRectangleStateCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
  {
    using Type = PipelineDiscardRectangleStateCreateInfoEXT;
  };
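
  // Illustrative usage sketch (not part of the generated API): a discard rectangle state is
  // chained into a graphics pipeline through its pNext. Assumes the default "vk" namespace
  // alias and enhanced mode; the rectangle value is hypothetical.
  //   vk::Rect2D exampleRect{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 256, 256 } };
  //   vk::PipelineDiscardRectangleStateCreateInfoEXT discardState{};
  //   discardState.setDiscardRectangleMode( vk::DiscardRectangleModeEXT::eInclusive )
  //               .setDiscardRectangles( exampleRect );  // ArrayProxy setter fills count and pointer
  //   vk::GraphicsPipelineCreateInfo pipelineCI{};
  //   pipelineCI.pNext = &discardState;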

  struct PipelineFragmentShadingRateEnumStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ =
        VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize,
      VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ =
        VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel,
      std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &
        combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep,
                           VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT
      : shadingRateType( shadingRateType_ )
      , shadingRate( shadingRate_ )
      , combinerOps( combinerOps_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(
      PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineFragmentShadingRateEnumStateCreateInfoNV(
          *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV &
                            operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineFragmentShadingRateEnumStateCreateInfoNV &
      operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineFragmentShadingRateEnumStateCreateInfoNV &
      setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateType = shadingRateType_;
      return *this;
    }

    PipelineFragmentShadingRateEnumStateCreateInfoNV &
      setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRate = shadingRate_;
      return *this;
    }

    PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps(
      std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
    {
      combinerOps = combinerOps_;
      return *this;
    }

    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
    }

    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) &&
             ( shadingRate == rhs.shadingRate ) && ( combinerOps == rhs.combinerOps );
    }

    bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType =
      VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize;
    VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate =
      VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
  };
  static_assert( sizeof( PipelineFragmentShadingRateEnumStateCreateInfoNV ) ==
                   sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineFragmentShadingRateEnumStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV>
  {
    using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV;
  };
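
  // Illustrative usage sketch (hypothetical values): the NV enum-based shading rate state also
  // extends GraphicsPipelineCreateInfo; combinerOps is a fixed two-element array of combiner ops.
  //   vk::PipelineFragmentShadingRateEnumStateCreateInfoNV shadingRateNV{};
  //   shadingRateNV.setShadingRateType( vk::FragmentShadingRateTypeNV::eEnums )
  //                .setShadingRate( vk::FragmentShadingRateNV::e1InvocationPerPixel )
  //                .setCombinerOps( { vk::FragmentShadingRateCombinerOpKHR::eKeep,
  //                                   vk::FragmentShadingRateCombinerOpKHR::eKeep } );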

  struct PipelineFragmentShadingRateStateCreateInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(
      VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {},
      std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> const &
        combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep,
                           VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } } ) VULKAN_HPP_NOEXCEPT
      : fragmentSize( fragmentSize_ )
      , combinerOps( combinerOps_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(
      PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineFragmentShadingRateStateCreateInfoKHR(
          *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR &
                            operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineFragmentShadingRateStateCreateInfoKHR &
      operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
      return *this;
    }

    PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineFragmentShadingRateStateCreateInfoKHR &
      setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentSize = fragmentSize_;
      return *this;
    }

    PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps(
      std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
    {
      combinerOps = combinerOps_;
      return *this;
    }

    operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
    }

    operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default;
#else
    bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) &&
             ( combinerOps == rhs.combinerOps );
    }

    bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
    const void *                        pNext        = {};
    VULKAN_HPP_NAMESPACE::Extent2D      fragmentSize = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
  };
  static_assert( sizeof( PipelineFragmentShadingRateStateCreateInfoKHR ) ==
                   sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineFragmentShadingRateStateCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
  {
    using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
  };
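
  // Illustrative usage sketch: with a StructureChain the KHR shading-rate state is linked into
  // GraphicsPipelineCreateInfo automatically. The 2x2 fragment size is just an example value.
  //   vk::StructureChain<vk::GraphicsPipelineCreateInfo, vk::PipelineFragmentShadingRateStateCreateInfoKHR> chain;
  //   chain.get<vk::PipelineFragmentShadingRateStateCreateInfoKHR>()
  //        .setFragmentSize( vk::Extent2D{ 2, 2 } )
  //        .setCombinerOps( { vk::FragmentShadingRateCombinerOpKHR::eKeep,
  //                           vk::FragmentShadingRateCombinerOpKHR::eKeep } );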

  struct PipelineRasterizationConservativeStateCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {},
      VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT                     conservativeRasterizationMode_ =
        VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled,
      float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , conservativeRasterizationMode( conservativeRasterizationMode_ )
      , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(
      PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationConservativeStateCreateInfoEXT(
      VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationConservativeStateCreateInfoEXT(
          *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT &
                            operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationConservativeStateCreateInfoEXT &
      operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineRasterizationConservativeStateCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode(
      VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      conservativeRasterizationMode = conservativeRasterizationMode_;
      return *this;
    }

    PipelineRasterizationConservativeStateCreateInfoEXT &
      setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
    {
      extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
      return *this;
    }

    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) &&
             ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
    }

    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT                     conservativeRasterizationMode =
      VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
    float extraPrimitiveOverestimationSize = {};
  };
  static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) ==
                   sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationConservativeStateCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
  };
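
  // Illustrative usage sketch (hypothetical overestimation size): conservative rasterization
  // extends PipelineRasterizationStateCreateInfo rather than the top-level pipeline create info.
  //   vk::PipelineRasterizationConservativeStateCreateInfoEXT conservativeState{};
  //   conservativeState.setConservativeRasterizationMode( vk::ConservativeRasterizationModeEXT::eOverestimate )
  //                    .setExtraPrimitiveOverestimationSize( 0.5f );
  //   vk::PipelineRasterizationStateCreateInfo rasterState{};
  //   rasterState.pNext = &conservativeState;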

  struct PipelineRasterizationDepthClipStateCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {},
      VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_                                  = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , depthClipEnable( depthClipEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(
      PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationDepthClipStateCreateInfoEXT(
          *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT &
                            operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationDepthClipStateCreateInfoEXT &
      operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineRasterizationDepthClipStateCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineRasterizationDepthClipStateCreateInfoEXT &
      setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipEnable = depthClipEnable_;
      return *this;
    }

    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( depthClipEnable == rhs.depthClipEnable );
    }

    bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags           = {};
    VULKAN_HPP_NAMESPACE::Bool32                                            depthClipEnable = {};
  };
  static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) ==
                   sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationDepthClipStateCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
  };
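
  // Illustrative usage sketch: depth-clip control is another pNext extension of
  // PipelineRasterizationStateCreateInfo; here depth clipping is explicitly disabled.
  //   vk::PipelineRasterizationDepthClipStateCreateInfoEXT depthClipState{};
  //   depthClipState.setDepthClipEnable( VK_FALSE );
  //   rasterState.pNext = &depthClipState;  // rasterState as in the conservative-rasterization sketch above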

  struct PipelineRasterizationLineStateCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineRasterizationLineStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ =
        VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault,
      VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {},
      uint32_t                     lineStippleFactor_  = {},
      uint16_t                     lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT
      : lineRasterizationMode( lineRasterizationMode_ )
      , stippledLineEnable( stippledLineEnable_ )
      , lineStippleFactor( lineStippleFactor_ )
      , lineStipplePattern( lineStipplePattern_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(
      PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationLineStateCreateInfoEXT(
          *reinterpret_cast<PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT &
                            operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationLineStateCreateInfoEXT &
      operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode(
      VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      lineRasterizationMode = lineRasterizationMode_;
      return *this;
    }

    PipelineRasterizationLineStateCreateInfoEXT &
      setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledLineEnable = stippledLineEnable_;
      return *this;
    }

    PipelineRasterizationLineStateCreateInfoEXT &
      setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      lineStippleFactor = lineStippleFactor_;
      return *this;
    }

    PipelineRasterizationLineStateCreateInfoEXT &
      setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
    {
      lineStipplePattern = lineStipplePattern_;
      return *this;
    }

    operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( lineRasterizationMode == rhs.lineRasterizationMode ) &&
             ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) &&
             ( lineStipplePattern == rhs.lineStipplePattern );
    }

    bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
    const void *                                   pNext = {};
    VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode =
      VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault;
    VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
    uint32_t                     lineStippleFactor  = {};
    uint16_t                     lineStipplePattern = {};
  };
  static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) ==
                   sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationLineStateCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationLineStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationLineStateCreateInfoEXT;
  };
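
  // Illustrative usage sketch (hypothetical stipple pattern): Bresenham line rasterization with
  // a stippled pattern, chained into the rasterization state like the extensions above.
  //   vk::PipelineRasterizationLineStateCreateInfoEXT lineState{};
  //   lineState.setLineRasterizationMode( vk::LineRasterizationModeEXT::eBresenham )
  //            .setStippledLineEnable( VK_TRUE )
  //            .setLineStippleFactor( 1 )
  //            .setLineStipplePattern( 0x5555 );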

  struct PipelineRasterizationProvokingVertexStateCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ =
        VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex ) VULKAN_HPP_NOEXCEPT
      : provokingVertexMode( provokingVertexMode_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT(
      PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationProvokingVertexStateCreateInfoEXT(
      VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationProvokingVertexStateCreateInfoEXT(
          *reinterpret_cast<PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT &
                            operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationProvokingVertexStateCreateInfoEXT &
      operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineRasterizationProvokingVertexStateCreateInfoEXT &
      setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT
    {
      provokingVertexMode = provokingVertexMode_;
      return *this;
    }

    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode );
    }

    bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode =
      VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex;
  };
  static_assert( sizeof( PipelineRasterizationProvokingVertexStateCreateInfoEXT ) ==
                   sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
  };
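
  // Illustrative usage sketch: selecting the last-vertex provoking-vertex convention; this
  // structure also extends PipelineRasterizationStateCreateInfo via pNext.
  //   vk::PipelineRasterizationProvokingVertexStateCreateInfoEXT provokingState{};
  //   provokingState.setProvokingVertexMode( vk::ProvokingVertexModeEXT::eLastVertex );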

  struct PipelineRasterizationStateRasterizationOrderAMD
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineRasterizationStateRasterizationOrderAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(
      VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ =
        VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT
      : rasterizationOrder( rasterizationOrder_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(
      PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationStateRasterizationOrderAMD(
          *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD &
                            operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationStateRasterizationOrderAMD &
      operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
      return *this;
    }

    PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineRasterizationStateRasterizationOrderAMD &
      setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationOrder = rasterizationOrder_;
      return *this;
    }

    operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
    }

    operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default;
#else
    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder );
    }

    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType         sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
    const void *                                pNext = {};
    VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder =
      VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
  };
  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) ==
                   sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationStateRasterizationOrderAMD>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
  {
    using Type = PipelineRasterizationStateRasterizationOrderAMD;
  };
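
  // Illustrative usage sketch: requesting relaxed rasterization order on AMD hardware.
  //   vk::PipelineRasterizationStateRasterizationOrderAMD rasterOrder{};
  //   rasterOrder.setRasterizationOrder( vk::RasterizationOrderAMD::eRelaxed );
  //   rasterState.pNext = &rasterOrder;  // as with the other rasterization-state extensions above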

  struct PipelineRasterizationStateStreamCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {},
      uint32_t rasterizationStream_                                               = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , rasterizationStream( rasterizationStream_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(
      PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationStateStreamCreateInfoEXT(
          *reinterpret_cast<PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT &
                            operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRasterizationStateStreamCreateInfoEXT &
      operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineRasterizationStateStreamCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineRasterizationStateStreamCreateInfoEXT &
      setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
    {
      rasterizationStream = rasterizationStream_;
      return *this;
    }

    operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT *>( this );
    }

    operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( rasterizationStream == rhs.rasterizationStream );
    }

    bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags               = {};
    uint32_t                                                             rasterizationStream = {};
  };
  static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) ==
                   sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationStateStreamCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationStateStreamCreateInfoEXT>
  {
    using Type = PipelineRasterizationStateStreamCreateInfoEXT;
  };
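
  // Illustrative usage sketch (hypothetical stream index): routing primitives to transform
  // feedback vertex stream 1 instead of the default stream 0.
  //   vk::PipelineRasterizationStateStreamCreateInfoEXT streamState{};
  //   streamState.setRasterizationStream( 1 );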

  struct PipelineRepresentativeFragmentTestStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT
      : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(
      PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRepresentativeFragmentTestStateCreateInfoNV(
      VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRepresentativeFragmentTestStateCreateInfoNV(
          *reinterpret_cast<PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV &
                            operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineRepresentativeFragmentTestStateCreateInfoNV &
      operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable(
      VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      representativeFragmentTestEnable = representativeFragmentTestEnable_;
      return *this;
    }

    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>( this );
    }

    operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
    }

    bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        representativeFragmentTestEnable = {};
  };
  static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) ==
                   sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRepresentativeFragmentTestStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV>
  {
    using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
  };
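
  // Illustrative usage sketch: enabling the representative fragment test; this structure
  // extends GraphicsPipelineCreateInfo.
  //   vk::PipelineRepresentativeFragmentTestStateCreateInfoNV reprTest{};
  //   reprTest.setRepresentativeFragmentTestEnable( VK_TRUE );
  //   pipelineCI.pNext = &reprTest;  // pipelineCI as in the discard-rectangle sketch above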

  struct PipelineSampleLocationsStateCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineSampleLocationsStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::Bool32                 sampleLocationsEnable_ = {},
      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_   = {} ) VULKAN_HPP_NOEXCEPT
      : sampleLocationsEnable( sampleLocationsEnable_ )
      , sampleLocationsInfo( sampleLocationsInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(
      PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineSampleLocationsStateCreateInfoEXT(
          *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT &
                            operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineSampleLocationsStateCreateInfoEXT &
      operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineSampleLocationsStateCreateInfoEXT &
      setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsEnable = sampleLocationsEnable_;
      return *this;
    }

    PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo(
      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsInfo = sampleLocationsInfo_;
      return *this;
    }

    operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
    }

    operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( sampleLocationsEnable == rhs.sampleLocationsEnable ) &&
             ( sampleLocationsInfo == rhs.sampleLocationsInfo );
    }

    bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
    const void *                                 pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32                 sampleLocationsEnable = {};
    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo   = {};
  };
  static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) ==
                   sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineSampleLocationsStateCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
  {
    using Type = PipelineSampleLocationsStateCreateInfoEXT;
  };
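
  // Illustrative usage sketch: enabling custom sample locations; sampleLocationsInfo is assumed
  // to be a previously filled vk::SampleLocationsInfoEXT. This structure extends
  // PipelineMultisampleStateCreateInfo via pNext.
  //   vk::PipelineSampleLocationsStateCreateInfoEXT sampleLocState{};
  //   sampleLocState.setSampleLocationsEnable( VK_TRUE ).setSampleLocationsInfo( sampleLocationsInfo );
  //   vk::PipelineMultisampleStateCreateInfo multisampleState{};
  //   multisampleState.pNext = &sampleLocState;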

  struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : requiredSubgroupSize( requiredSubgroupSize_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(
      PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(
      VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(
          *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &
                            operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &
      operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>( &rhs );
      return *this;
    }

    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>( this );
    }

    operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize );
    }

    bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
    void *                              pNext = {};
    uint32_t                            requiredSubgroupSize = {};
  };
  static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) ==
                   sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>
  {
    using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
  };
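
  // Illustrative usage sketch (hypothetical size): no setters are generated for this structure,
  // so members are assigned directly before chaining it into a PipelineShaderStageCreateInfo.
  //   vk::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT requiredSize{};
  //   requiredSize.requiredSubgroupSize = 32;  // must be a power of two within the device's supported range
  //   vk::PipelineShaderStageCreateInfo stageCI{};
  //   stageCI.pNext = &requiredSize;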

  struct PipelineTessellationDomainOriginStateCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineTessellationDomainOriginStateCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(
      VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ =
        VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT : domainOrigin( domainOrigin_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(
      PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineTessellationDomainOriginStateCreateInfo(
          *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo &
                            operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineTessellationDomainOriginStateCreateInfo &
      operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
      return *this;
    }

    PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineTessellationDomainOriginStateCreateInfo &
      setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
    {
      domainOrigin = domainOrigin_;
      return *this;
    }

    operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
    }

    operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default;
#else
    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin );
    }

    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin =
      VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
  };
  static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) ==
                   sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineTessellationDomainOriginStateCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
  {
    using Type = PipelineTessellationDomainOriginStateCreateInfo;
  };
  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
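
  // Illustrative usage sketch: switching the tessellation domain origin to lower-left
  // (OpenGL-style), chained into PipelineTessellationStateCreateInfo via pNext.
  //   vk::PipelineTessellationDomainOriginStateCreateInfo domainOrigin{};
  //   domainOrigin.setDomainOrigin( vk::TessellationDomainOrigin::eLowerLeft );
  //   vk::PipelineTessellationStateCreateInfo tessState{};
  //   tessState.pNext = &domainOrigin;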

  struct VertexInputBindingDivisorDescriptionEXT
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {},
                                                                  uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
      : binding( binding_ )
      , divisor( divisor_ )
    {}

    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VertexInputBindingDivisorDescriptionEXT(
          *reinterpret_cast<VertexInputBindingDivisorDescriptionEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT &
                            operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VertexInputBindingDivisorDescriptionEXT &
      operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs );
      return *this;
    }

    VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
    {
      binding = binding_;
      return *this;
    }

    VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
    {
      divisor = divisor_;
      return *this;
    }

    operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT *>( this );
    }

    operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VertexInputBindingDivisorDescriptionEXT const & ) const = default;
#else
    bool operator==( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( binding == rhs.binding ) && ( divisor == rhs.divisor );
    }

    bool operator!=( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t binding = {};
    uint32_t divisor = {};
  };
  static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) ==
                   sizeof( VkVertexInputBindingDivisorDescriptionEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value,
                 "struct wrapper is not a standard layout!" );

  struct PipelineVertexInputDivisorStateCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(
      uint32_t                                                              vertexBindingDivisorCount_ = {},
      const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_    = {} )
      VULKAN_HPP_NOEXCEPT
      : vertexBindingDivisorCount( vertexBindingDivisorCount_ )
      , pVertexBindingDivisors( pVertexBindingDivisors_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(
      PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineVertexInputDivisorStateCreateInfoEXT(
          *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineVertexInputDivisorStateCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ )
      : vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) )
      , pVertexBindingDivisors( vertexBindingDivisors_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT &
                            operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineVertexInputDivisorStateCreateInfoEXT &
      operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs );
      return *this;
    }

    PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineVertexInputDivisorStateCreateInfoEXT &
      setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDivisorCount = vertexBindingDivisorCount_;
      return *this;
    }

    PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors(
      const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ )
      VULKAN_HPP_NOEXCEPT
    {
      pVertexBindingDivisors = pVertexBindingDivisors_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<
        const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ )
      VULKAN_HPP_NOEXCEPT
    {
      vertexBindingDivisorCount = static_cast<uint32_t>( vertexBindingDivisors_.size() );
      pVertexBindingDivisors    = vertexBindingDivisors_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT *>( this );
    }

    operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const & ) const = default;
#else
    bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) &&
             ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
    }

    bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
    const void *                        pNext = {};
    uint32_t                            vertexBindingDivisorCount                                = {};
    const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {};
  };
  static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) ==
                   sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineVertexInputDivisorStateCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT>
  {
    using Type = PipelineVertexInputDivisorStateCreateInfoEXT;
  };
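
  // Usage sketch (illustrative only, not generator output): with VK_EXT_vertex_attribute_divisor, per-binding
  // divisors are described with VertexInputBindingDivisorDescriptionEXT and attached to a
  // PipelineVertexInputStateCreateInfo through the pNext chain. Binding index 1 and divisor 4 are arbitrary
  // example values; assumes the default "vk" namespace with enhanced mode enabled.
  //
  //   std::array<vk::VertexInputBindingDivisorDescriptionEXT, 1> divisors = {
  //     vk::VertexInputBindingDivisorDescriptionEXT{ 1, 4 } };   // advance binding 1 once every 4 instances
  //   vk::PipelineVertexInputDivisorStateCreateInfoEXT divisorState{};
  //   divisorState.setVertexBindingDivisors( divisors );         // fills vertexBindingDivisorCount and pointer
  //   vk::PipelineVertexInputStateCreateInfo vertexInput{};
  //   vertexInput.setPNext( &divisorState );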

  struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ =
        VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault,
      uint32_t                                                customSampleOrderCount_ = {},
      const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_    = {} ) VULKAN_HPP_NOEXCEPT
      : sampleOrderType( sampleOrderType_ )
      , customSampleOrderCount( customSampleOrderCount_ )
      , pCustomSampleOrders( pCustomSampleOrders_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(
      PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportCoarseSampleOrderStateCreateInfoNV(
      VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineViewportCoarseSampleOrderStateCreateInfoNV(
          *reinterpret_cast<PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportCoarseSampleOrderStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const &
        customSampleOrders_ )
      : sampleOrderType( sampleOrderType_ )
      , customSampleOrderCount( static_cast<uint32_t>( customSampleOrders_.size() ) )
      , pCustomSampleOrders( customSampleOrders_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV &
                            operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportCoarseSampleOrderStateCreateInfoNV &
      operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineViewportCoarseSampleOrderStateCreateInfoNV &
      setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleOrderType = sampleOrderType_;
      return *this;
    }

    PipelineViewportCoarseSampleOrderStateCreateInfoNV &
      setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT
    {
      customSampleOrderCount = customSampleOrderCount_;
      return *this;
    }

    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders(
      const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
    {
      pCustomSampleOrders = pCustomSampleOrders_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const &
        customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
    {
      customSampleOrderCount = static_cast<uint32_t>( customSampleOrders_.size() );
      pCustomSampleOrders    = customSampleOrders_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) &&
             ( customSampleOrderCount == rhs.customSampleOrderCount ) &&
             ( pCustomSampleOrders == rhs.pCustomSampleOrders );
    }

    bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType =
      VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
    uint32_t                                                customSampleOrderCount = {};
    const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders    = {};
  };
  static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) ==
                   sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV>
  {
    using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
  };
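
  // Usage sketch (illustrative only, not generator output): selecting a non-default coarse sample order for
  // VK_NV_shading_rate_image by chaining this struct into the viewport state. ePixelMajor needs no custom
  // sample orders; assumes the default "vk" namespace.
  //
  //   vk::PipelineViewportCoarseSampleOrderStateCreateInfoNV sampleOrder{
  //     vk::CoarseSampleOrderTypeNV::ePixelMajor };
  //   vk::PipelineViewportStateCreateInfo viewportState{};
  //   viewportState.setPNext( &sampleOrder );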

  struct PipelineViewportExclusiveScissorStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(
      uint32_t                             exclusiveScissorCount_ = {},
      const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_    = {} ) VULKAN_HPP_NOEXCEPT
      : exclusiveScissorCount( exclusiveScissorCount_ )
      , pExclusiveScissors( pExclusiveScissors_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(
      PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineViewportExclusiveScissorStateCreateInfoNV(
          *reinterpret_cast<PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportExclusiveScissorStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ )
      : exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) )
      , pExclusiveScissors( exclusiveScissors_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV &
                            operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportExclusiveScissorStateCreateInfoNV &
      operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineViewportExclusiveScissorStateCreateInfoNV &
      setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
    {
      exclusiveScissorCount = exclusiveScissorCount_;
      return *this;
    }

    PipelineViewportExclusiveScissorStateCreateInfoNV &
      setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
    {
      pExclusiveScissors = pExclusiveScissors_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ )
      VULKAN_HPP_NOEXCEPT
    {
      exclusiveScissorCount = static_cast<uint32_t>( exclusiveScissors_.size() );
      pExclusiveScissors    = exclusiveScissors_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( exclusiveScissorCount == rhs.exclusiveScissorCount ) && ( pExclusiveScissors == rhs.pExclusiveScissors );
    }

    bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType  sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
    const void *                         pNext = {};
    uint32_t                             exclusiveScissorCount = {};
    const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors    = {};
  };
  static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) ==
                   sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineViewportExclusiveScissorStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV>
  {
    using Type = PipelineViewportExclusiveScissorStateCreateInfoNV;
  };
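
  // Usage sketch (illustrative only, not generator output): one exclusive scissor rectangle per viewport,
  // chained into the viewport state for VK_NV_scissor_exclusive. The 256x256 rectangle is an arbitrary
  // example; assumes the default "vk" namespace with enhanced mode enabled.
  //
  //   std::array<vk::Rect2D, 1> scissors = { vk::Rect2D{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 256, 256 } } };
  //   vk::PipelineViewportExclusiveScissorStateCreateInfoNV exclusiveScissor{};
  //   exclusiveScissor.setExclusiveScissors( scissors );   // fills exclusiveScissorCount and pointer
  //   vk::PipelineViewportStateCreateInfo viewportState{};
  //   viewportState.setPNext( &exclusiveScissor );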

  struct PipelineViewportShadingRateImageStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::Bool32                       shadingRateImageEnable_ = {},
      uint32_t                                           viewportCount_          = {},
      const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_   = {} ) VULKAN_HPP_NOEXCEPT
      : shadingRateImageEnable( shadingRateImageEnable_ )
      , viewportCount( viewportCount_ )
      , pShadingRatePalettes( pShadingRatePalettes_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(
      PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineViewportShadingRateImageStateCreateInfoNV(
          *reinterpret_cast<PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportShadingRateImageStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const &
        shadingRatePalettes_ )
      : shadingRateImageEnable( shadingRateImageEnable_ )
      , viewportCount( static_cast<uint32_t>( shadingRatePalettes_.size() ) )
      , pShadingRatePalettes( shadingRatePalettes_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV &
                            operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportShadingRateImageStateCreateInfoNV &
      operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this =
        *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineViewportShadingRateImageStateCreateInfoNV &
      setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateImageEnable = shadingRateImageEnable_;
      return *this;
    }

    PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = viewportCount_;
      return *this;
    }

    PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes(
      const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
    {
      pShadingRatePalettes = pShadingRatePalettes_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const &
        shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount        = static_cast<uint32_t>( shadingRatePalettes_.size() );
      pShadingRatePalettes = shadingRatePalettes_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && ( viewportCount == rhs.viewportCount ) &&
             ( pShadingRatePalettes == rhs.pShadingRatePalettes );
    }

    bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        shadingRateImageEnable              = {};
    uint32_t                            viewportCount                       = {};
    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {};
  };
  static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) ==
                   sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineViewportShadingRateImageStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
  {
    using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
  };
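
  // Usage sketch (illustrative only, not generator output): enabling the shading rate image with a single
  // one-entry palette for the first viewport (VK_NV_shading_rate_image). Palette contents are arbitrary
  // example values; assumes the default "vk" namespace with enhanced mode enabled.
  //
  //   std::array<vk::ShadingRatePaletteEntryNV, 1> entries = {
  //     vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel };
  //   vk::ShadingRatePaletteNV palette{ static_cast<uint32_t>( entries.size() ), entries.data() };
  //   vk::PipelineViewportShadingRateImageStateCreateInfoNV shadingRateState{};
  //   shadingRateState.setShadingRateImageEnable( VK_TRUE ).setShadingRatePalettes( palette );
  //   vk::PipelineViewportStateCreateInfo viewportState{};
  //   viewportState.setPNext( &shadingRateState );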

  struct ViewportSwizzleNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ =
                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
                         VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ =
                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
                         VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ =
                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
                         VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ =
                           VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
      : x( x_ )
      , y( y_ )
      , z( z_ )
      , w( w_ )
    {}

    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV &
                            operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>( &rhs );
      return *this;
    }

    ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
    {
      x = x_;
      return *this;
    }

    ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
    {
      y = y_;
      return *this;
    }

    ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
    {
      z = z_;
      return *this;
    }

    ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
    {
      w = w_;
      return *this;
    }

    operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViewportSwizzleNV *>( this );
    }

    operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViewportSwizzleNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ViewportSwizzleNV const & ) const = default;
#else
    bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w );
    }

    bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
  };
  static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
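
  // Usage sketch (illustrative only, not generator output): a swizzle that negates the viewport's Y
  // coordinate while leaving the other components untouched (VK_NV_viewport_swizzle). Assumes the
  // default "vk" namespace.
  //
  //   vk::ViewportSwizzleNV flipY{ vk::ViewportCoordinateSwizzleNV::ePositiveX,
  //                                vk::ViewportCoordinateSwizzleNV::eNegativeY,
  //                                vk::ViewportCoordinateSwizzleNV::ePositiveZ,
  //                                vk::ViewportCoordinateSwizzleNV::ePositiveW };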

  struct PipelineViewportSwizzleStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineViewportSwizzleStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_             = {},
      uint32_t                                                        viewportCount_     = {},
      const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV *                 pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , viewportCount( viewportCount_ )
      , pViewportSwizzles( pViewportSwizzles_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(
      PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineViewportSwizzleStateCreateInfoNV(
          *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportSwizzleStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const &
        viewportSwizzles_ )
      : flags( flags_ )
      , viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) )
      , pViewportSwizzles( viewportSwizzles_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV &
                            operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportSwizzleStateCreateInfoNV &
      operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineViewportSwizzleStateCreateInfoNV &
      setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = viewportCount_;
      return *this;
    }

    PipelineViewportSwizzleStateCreateInfoNV &
      setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewportSwizzles = pViewportSwizzles_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const &
        viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount     = static_cast<uint32_t>( viewportSwizzles_.size() );
      pViewportSwizzles = viewportSwizzles_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( viewportCount == rhs.viewportCount ) && ( pViewportSwizzles == rhs.pViewportSwizzles );
    }

    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags             = {};
    uint32_t                                                        viewportCount     = {};
    const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV *                 pViewportSwizzles = {};
  };
  static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) ==
                   sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineViewportSwizzleStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
  {
    using Type = PipelineViewportSwizzleStateCreateInfoNV;
  };
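
  // Usage sketch (illustrative only, not generator output): attaching one ViewportSwizzleNV per viewport to
  // the viewport state via the pNext chain. "swizzles" would normally hold as many entries as the pipeline
  // declares viewports; assumes the default "vk" namespace with enhanced mode enabled.
  //
  //   std::array<vk::ViewportSwizzleNV, 1> swizzles = { vk::ViewportSwizzleNV{} };  // all components default to ePositiveX
  //   vk::PipelineViewportSwizzleStateCreateInfoNV swizzleState{};
  //   swizzleState.setViewportSwizzles( swizzles );        // fills viewportCount and pointer
  //   vk::PipelineViewportStateCreateInfo viewportState{};
  //   viewportState.setPNext( &swizzleState );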

  struct PipelineViewportWScalingStateCreateInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::ePipelineViewportWScalingStateCreateInfoNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::Bool32                     viewportWScalingEnable_ = {},
      uint32_t                                         viewportCount_          = {},
      const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_     = {} ) VULKAN_HPP_NOEXCEPT
      : viewportWScalingEnable( viewportWScalingEnable_ )
      , viewportCount( viewportCount_ )
      , pViewportWScalings( pViewportWScalings_ )
    {}

    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(
      PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : PipelineViewportWScalingStateCreateInfoNV(
          *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportWScalingStateCreateInfoNV(
      VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &
        viewportWScalings_ )
      : viewportWScalingEnable( viewportWScalingEnable_ )
      , viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) )
      , pViewportWScalings( viewportWScalings_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV &
                            operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PipelineViewportWScalingStateCreateInfoNV &
      operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
      return *this;
    }

    PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PipelineViewportWScalingStateCreateInfoNV &
      setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportWScalingEnable = viewportWScalingEnable_;
      return *this;
    }

    PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount = viewportCount_;
      return *this;
    }

    PipelineViewportWScalingStateCreateInfoNV &
      setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewportWScalings = pViewportWScalings_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const &
        viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
    {
      viewportCount      = static_cast<uint32_t>( viewportWScalings_.size() );
      pViewportWScalings = viewportWScalings_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV *>( this );
    }

    operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default;
#else
    bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && ( viewportCount == rhs.viewportCount ) &&
             ( pViewportWScalings == rhs.pViewportWScalings );
    }

    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
    const void *                                     pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32                     viewportWScalingEnable = {};
    uint32_t                                         viewportCount          = {};
    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings     = {};
  };
  static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) ==
                   sizeof( VkPipelineViewportWScalingStateCreateInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineViewportWScalingStateCreateInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
  {
    using Type = PipelineViewportWScalingStateCreateInfoNV;
  };
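
  // Usage sketch (illustrative only, not generator output): enabling viewport W scaling
  // (VK_NV_clip_space_w_scaling) with neutral coefficients for a single viewport. Coefficients are
  // arbitrary example values; assumes the default "vk" namespace with enhanced mode enabled.
  //
  //   std::array<vk::ViewportWScalingNV, 1> wScalings = { vk::ViewportWScalingNV{ 1.0f, 1.0f } };
  //   vk::PipelineViewportWScalingStateCreateInfoNV wScalingState{};
  //   wScalingState.setViewportWScalingEnable( VK_TRUE ).setViewportWScalings( wScalings );
  //   vk::PipelineViewportStateCreateInfo viewportState{};
  //   viewportState.setPNext( &wScalingState );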

#if defined( VK_USE_PLATFORM_GGP )
  struct PresentFrameTokenGGP
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {} ) VULKAN_HPP_NOEXCEPT
      : frameToken( frameToken_ )
    {}

    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentFrameTokenGGP( *reinterpret_cast<PresentFrameTokenGGP const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP &
                            operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>( &rhs );
      return *this;
    }

    PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT
    {
      frameToken = frameToken_;
      return *this;
    }

    operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentFrameTokenGGP *>( this );
    }

    operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentFrameTokenGGP *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentFrameTokenGGP const & ) const = default;
#  else
    bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
    }

    bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType      = StructureType::ePresentFrameTokenGGP;
    const void *                        pNext      = {};
    GgpFrameToken                       frameToken = {};
  };
  static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
  {
    using Type = PresentFrameTokenGGP;
  };
#endif /*VK_USE_PLATFORM_GGP*/

  struct RectLayerKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
                                       VULKAN_HPP_NAMESPACE::Extent2D extent_ = {},
                                       uint32_t                       layer_  = {} ) VULKAN_HPP_NOEXCEPT
      : offset( offset_ )
      , extent( extent_ )
      , layer( layer_ )
    {}

    VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) )
    {}

    explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} )
      : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>( &rhs );
      return *this;
    }

    RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
    {
      offset = offset_;
      return *this;
    }

    RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
    {
      extent = extent_;
      return *this;
    }

    RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
    {
      layer = layer_;
      return *this;
    }

    operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRectLayerKHR *>( this );
    }

    operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRectLayerKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RectLayerKHR const & ) const = default;
#else
    bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer );
    }

    bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
    uint32_t                       layer  = {};
  };
  static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
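
  // Usage sketch (illustrative only, not generator output): the explicit Rect2D convenience constructor
  // above builds a RectLayerKHR from an existing rectangle plus a layer index. Values are arbitrary
  // examples; assumes the default "vk" namespace.
  //
  //   vk::Rect2D damaged{ vk::Offset2D{ 16, 16 }, vk::Extent2D{ 64, 64 } };
  //   vk::RectLayerKHR rectLayer{ damaged, 0 };   // layer 0 of the swapchain image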

  struct PresentRegionKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PresentRegionKHR( uint32_t                                   rectangleCount_ = {},
                        const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_    = {} ) VULKAN_HPP_NOEXCEPT
      : rectangleCount( rectangleCount_ )
      , pRectangles( pRectangles_ )
    {}

    VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentRegionKHR(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
      : rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>( &rhs );
      return *this;
    }

    PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      rectangleCount = rectangleCount_;
      return *this;
    }

    PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      pRectangles = pRectangles_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentRegionKHR & setRectangles(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
      VULKAN_HPP_NOEXCEPT
    {
      rectangleCount = static_cast<uint32_t>( rectangles_.size() );
      pRectangles    = rectangles_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentRegionKHR *>( this );
    }

    operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentRegionKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentRegionKHR const & ) const = default;
#else
    bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles );
    }

    bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                   rectangleCount = {};
    const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles    = {};
  };
  static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );

  struct PresentRegionsKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PresentRegionsKHR( uint32_t                                       swapchainCount_ = {},
                         const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_       = {} ) VULKAN_HPP_NOEXCEPT
      : swapchainCount( swapchainCount_ )
      , pRegions( pRegions_ )
    {}

    VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentRegionsKHR(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
      : swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR &
                            operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>( &rhs );
      return *this;
    }

    PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT
    {
      pRegions = pRegions_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentRegionsKHR & setRegions(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
      VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( regions_.size() );
      pRegions       = regions_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentRegionsKHR *>( this );
    }

    operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentRegionsKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentRegionsKHR const & ) const = default;
#else
    bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
             ( pRegions == rhs.pRegions );
    }

    bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType          = StructureType::ePresentRegionsKHR;
    const void *                                   pNext          = {};
    uint32_t                                       swapchainCount = {};
    const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions       = {};
  };
  static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePresentRegionsKHR>
  {
    using Type = PresentRegionsKHR;
  };
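
  // Usage sketch (illustrative only, not generator output): for VK_KHR_incremental_present, one
  // PresentRegionKHR per swapchain lists the RectLayerKHR rectangles that actually changed, and
  // PresentRegionsKHR is chained into the PresentInfoKHR passed to presentKHR. Assumes the default
  // "vk" namespace with enhanced mode enabled.
  //
  //   std::array<vk::RectLayerKHR, 1> rects = {
  //     vk::RectLayerKHR{ vk::Offset2D{ 0, 0 }, vk::Extent2D{ 64, 64 }, 0 } };
  //   vk::PresentRegionKHR region{};
  //   region.setRectangles( rects );
  //   vk::PresentRegionsKHR presentRegions{};
  //   presentRegions.setRegions( region );        // swapchainCount must match the PresentInfoKHR below
  //   vk::PresentInfoKHR presentInfo{};
  //   presentInfo.setPNext( &presentRegions );    // then fill swapchains / image indices as usual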

  struct PresentTimeGOOGLE
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_          = {},
                                            uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
      : presentID( presentID_ )
      , desiredPresentTime( desiredPresentTime_ )
    {}

    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE &
                            operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
      return *this;
    }

    PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
    {
      presentID = presentID_;
      return *this;
    }

    PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
    {
      desiredPresentTime = desiredPresentTime_;
      return *this;
    }

    operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentTimeGOOGLE *>( this );
    }

    operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentTimeGOOGLE *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentTimeGOOGLE const & ) const = default;
#else
    bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime );
    }

    bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t presentID          = {};
    uint64_t desiredPresentTime = {};
  };
  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );

  struct PresentTimesInfoGOOGLE
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      PresentTimesInfoGOOGLE( uint32_t                                        swapchainCount_ = {},
                              const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_         = {} ) VULKAN_HPP_NOEXCEPT
      : swapchainCount( swapchainCount_ )
      , pTimes( pTimes_ )
    {}

    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PresentTimesInfoGOOGLE( *reinterpret_cast<PresentTimesInfoGOOGLE const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentTimesInfoGOOGLE(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
      : swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE &
                            operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>( &rhs );
      return *this;
    }

    PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = swapchainCount_;
      return *this;
    }

    PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT
    {
      pTimes = pTimes_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    PresentTimesInfoGOOGLE & setTimes(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
      VULKAN_HPP_NOEXCEPT
    {
      swapchainCount = static_cast<uint32_t>( times_.size() );
      pTimes         = times_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE *>( this );
    }

    operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPresentTimesInfoGOOGLE *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default;
#else
    bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) &&
             ( pTimes == rhs.pTimes );
    }

    bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType             sType          = StructureType::ePresentTimesInfoGOOGLE;
    const void *                                    pNext          = {};
    uint32_t                                        swapchainCount = {};
    const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes         = {};
  };
  static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::ePresentTimesInfoGOOGLE>
  {
    using Type = PresentTimesInfoGOOGLE;
  };
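  // PresentTimesInfoGOOGLE extends VkPresentInfoKHR (VK_GOOGLE_display_timing) with one
  // PresentTimeGOOGLE entry per swapchain being presented. Minimal usage sketch, assuming the
  // default vk namespace and enhanced mode; frameId, desiredPresentTimeNs and presentInfo are placeholders:
  //   std::vector<vk::PresentTimeGOOGLE> presentTimes = { { frameId, desiredPresentTimeNs } };
  //   vk::PresentTimesInfoGOOGLE timesInfo( presentTimes );  // fills swapchainCount and pTimes
  //   presentInfo.setPNext( &timesInfo );                    // presentInfo is a vk::PresentInfoKHR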

  struct ProtectedSubmitInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {} ) VULKAN_HPP_NOEXCEPT
      : protectedSubmit( protectedSubmit_ )
    {}

    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo &
                            operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
      return *this;
    }

    ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
    {
      protectedSubmit = protectedSubmit_;
      return *this;
    }

    operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkProtectedSubmitInfo *>( this );
    }

    operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkProtectedSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ProtectedSubmitInfo const & ) const = default;
#else
    bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit );
    }

    bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eProtectedSubmitInfo;
    const void *                        pNext           = {};
    VULKAN_HPP_NAMESPACE::Bool32        protectedSubmit = {};
  };
  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
  {
    using Type = ProtectedSubmitInfo;
  };
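  // ProtectedSubmitInfo extends VkSubmitInfo (Vulkan 1.1 protected memory); protectedSubmit = VK_TRUE
  // marks the batch as a protected submission. Illustrative sketch; submitInfo is a placeholder vk::SubmitInfo:
  //   vk::ProtectedSubmitInfo protectedInfo( VK_TRUE );
  //   submitInfo.setPNext( &protectedInfo );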

  struct QueryPoolPerformanceQueryCreateInfoINTEL
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(
      VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ =
        VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT
      : performanceCountersSampling( performanceCountersSampling_ )
    {}

    VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(
      QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs )
      VULKAN_HPP_NOEXCEPT
      : QueryPoolPerformanceQueryCreateInfoINTEL(
          *reinterpret_cast<QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL &
                            operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueryPoolPerformanceQueryCreateInfoINTEL &
      operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs );
      return *this;
    }

    QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling(
      VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCountersSampling = performanceCountersSampling_;
      return *this;
    }

    operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL *>( this );
    }

    operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default;
#else
    bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( performanceCountersSampling == rhs.performanceCountersSampling );
    }

    bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
    const void *                                     pNext = {};
    VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling =
      VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
  };
  static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) ==
                   sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<QueryPoolPerformanceQueryCreateInfoINTEL>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
  {
    using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
  };
  using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
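  // QueryPoolPerformanceQueryCreateInfoINTEL extends VkQueryPoolCreateInfo (VK_INTEL_performance_query)
  // and selects the sampling mode for an Intel performance query pool. Illustrative sketch (placeholder values):
  //   vk::QueryPoolPerformanceQueryCreateInfoINTEL intelInfo( vk::QueryPoolSamplingModeINTEL::eManual );
  //   vk::QueryPoolCreateInfo poolInfo( {}, vk::QueryType::ePerformanceQueryINTEL, 1 );
  //   poolInfo.setPNext( &intelInfo );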

  struct QueueFamilyCheckpointProperties2NV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eQueueFamilyCheckpointProperties2NV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV(
      VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
    {}

    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyCheckpointProperties2NV( *reinterpret_cast<QueueFamilyCheckpointProperties2NV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyCheckpointProperties2NV &
                            operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointProperties2NV &
      operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV const *>( &rhs );
      return *this;
    }

    operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyCheckpointProperties2NV *>( this );
    }

    operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyCheckpointProperties2NV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default;
#else
    bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
    }

    bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType = StructureType::eQueueFamilyCheckpointProperties2NV;
    void *                                       pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR checkpointExecutionStageMask = {};
  };
  static_assert( sizeof( QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<QueueFamilyCheckpointProperties2NV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointProperties2NV>
  {
    using Type = QueueFamilyCheckpointProperties2NV;
  };

  struct QueueFamilyCheckpointPropertiesNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eQueueFamilyCheckpointPropertiesNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(
      VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT
      : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
    {}

    VULKAN_HPP_CONSTEXPR
      QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyCheckpointPropertiesNV &
                            operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>( &rhs );
      return *this;
    }

    operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV *>( this );
    }

    operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default;
#else
    bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
    }

    bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
    void *                                   pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
  };
  static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<QueueFamilyCheckpointPropertiesNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
  {
    using Type = QueueFamilyCheckpointPropertiesNV;
  };
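  // QueueFamilyCheckpointPropertiesNV (and the ...2NV variant above) are read-only outputs of
  // VK_NV_device_diagnostic_checkpoints, reporting the pipeline stages at which checkpoint markers can be
  // queried. They are filled by vkGetPhysicalDeviceQueueFamilyProperties2 (or by
  // PhysicalDevice::getQueueFamilyProperties2 with a StructureChain). Illustrative chaining sketch:
  //   vk::QueueFamilyCheckpointPropertiesNV checkpointProps;
  //   vk::QueueFamilyProperties2 props2;
  //   props2.pNext = &checkpointProps;  // chain the extension struct behind QueueFamilyProperties2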

  struct QueueFamilyGlobalPriorityPropertiesEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eQueueFamilyGlobalPriorityPropertiesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesEXT(
      uint32_t priorityCount_ = {},
      std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT, VK_MAX_GLOBAL_PRIORITY_SIZE_EXT> const &
        priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow,
                          VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow } } ) VULKAN_HPP_NOEXCEPT
      : priorityCount( priorityCount_ )
      , priorities( priorities_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesEXT( QueueFamilyGlobalPriorityPropertiesEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyGlobalPriorityPropertiesEXT( VkQueueFamilyGlobalPriorityPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : QueueFamilyGlobalPriorityPropertiesEXT(
          *reinterpret_cast<QueueFamilyGlobalPriorityPropertiesEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesEXT &
                            operator=( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    QueueFamilyGlobalPriorityPropertiesEXT &
      operator=( VkQueueFamilyGlobalPriorityPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesEXT const *>( &rhs );
      return *this;
    }

    QueueFamilyGlobalPriorityPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    QueueFamilyGlobalPriorityPropertiesEXT & setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT
    {
      priorityCount = priorityCount_;
      return *this;
    }

    QueueFamilyGlobalPriorityPropertiesEXT & setPriorities(
      std::array<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT, VK_MAX_GLOBAL_PRIORITY_SIZE_EXT> priorities_ )
      VULKAN_HPP_NOEXCEPT
    {
      priorities = priorities_;
      return *this;
    }

    operator VkQueueFamilyGlobalPriorityPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkQueueFamilyGlobalPriorityPropertiesEXT *>( this );
    }

    operator VkQueueFamilyGlobalPriorityPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkQueueFamilyGlobalPriorityPropertiesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( QueueFamilyGlobalPriorityPropertiesEXT const & ) const = default;
#else
    bool operator==( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) &&
             ( priorities == rhs.priorities );
    }

    bool operator!=( QueueFamilyGlobalPriorityPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eQueueFamilyGlobalPriorityPropertiesEXT;
    void *                              pNext         = {};
    uint32_t                            priorityCount = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT, VK_MAX_GLOBAL_PRIORITY_SIZE_EXT>
      priorities = {};
  };
  static_assert( sizeof( QueueFamilyGlobalPriorityPropertiesEXT ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<QueueFamilyGlobalPriorityPropertiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eQueueFamilyGlobalPriorityPropertiesEXT>
  {
    using Type = QueueFamilyGlobalPriorityPropertiesEXT;
  };
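  // QueueFamilyGlobalPriorityPropertiesEXT is a read-only output of VK_EXT_global_priority_query, listing
  // the global queue priorities supported by a queue family. It is obtained like the checkpoint properties
  // above, by chaining it behind vk::QueueFamilyProperties2 when querying queue family properties:
  //   vk::QueueFamilyGlobalPriorityPropertiesEXT priorityProps;
  //   props2.pNext = &priorityProps;  // props2 is a vk::QueueFamilyProperties2 (placeholder)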

  struct RenderPassAttachmentBeginInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      RenderPassAttachmentBeginInfo( uint32_t                                attachmentCount_ = {},
                                     const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_    = {} ) VULKAN_HPP_NOEXCEPT
      : attachmentCount( attachmentCount_ )
      , pAttachments( pAttachments_ )
    {}

    VULKAN_HPP_CONSTEXPR
      RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassAttachmentBeginInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
      : attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo &
                            operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
      return *this;
    }

    RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = attachmentCount_;
      return *this;
    }

    RenderPassAttachmentBeginInfo &
      setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachments = pAttachments_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassAttachmentBeginInfo & setAttachments(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
      VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast<uint32_t>( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo *>( this );
    }

    operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassAttachmentBeginInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default;
#else
    bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) &&
             ( pAttachments == rhs.pAttachments );
    }

    bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType     sType           = StructureType::eRenderPassAttachmentBeginInfo;
    const void *                            pNext           = {};
    uint32_t                                attachmentCount = {};
    const VULKAN_HPP_NAMESPACE::ImageView * pAttachments    = {};
  };
  static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassAttachmentBeginInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>
  {
    using Type = RenderPassAttachmentBeginInfo;
  };
  using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
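  // RenderPassAttachmentBeginInfo extends VkRenderPassBeginInfo (Vulkan 1.2 / VK_KHR_imageless_framebuffer)
  // and supplies the actual image views when beginning a render pass that uses an imageless framebuffer.
  // Sketch assuming enhanced mode; colorView, depthView and renderPassBeginInfo are placeholders:
  //   std::vector<vk::ImageView> attachments = { colorView, depthView };
  //   vk::RenderPassAttachmentBeginInfo attachmentInfo( attachments );
  //   renderPassBeginInfo.setPNext( &attachmentInfo );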

  struct RenderPassFragmentDensityMapCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
      : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
    {}

    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(
      RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : RenderPassFragmentDensityMapCreateInfoEXT(
          *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT &
                            operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassFragmentDensityMapCreateInfoEXT &
      operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
      return *this;
    }

    RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment(
      VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
      return *this;
    }

    operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
    }

    operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default;
#else
    bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
    }

    bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
    const void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
  };
  static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) ==
                   sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassFragmentDensityMapCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
  {
    using Type = RenderPassFragmentDensityMapCreateInfoEXT;
  };
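  // RenderPassFragmentDensityMapCreateInfoEXT extends VkRenderPassCreateInfo (VK_EXT_fragment_density_map)
  // and identifies which attachment serves as the fragment density map. Illustrative sketch; the attachment
  // index 1 and renderPassCreateInfo are placeholders:
  //   vk::RenderPassFragmentDensityMapCreateInfoEXT densityMapInfo(
  //     vk::AttachmentReference( 1, vk::ImageLayout::eFragmentDensityMapOptimalEXT ) );
  //   renderPassCreateInfo.setPNext( &densityMapInfo );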

  struct RenderPassInputAttachmentAspectCreateInfo
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eRenderPassInputAttachmentAspectCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(
      uint32_t                                                     aspectReferenceCount_ = {},
      const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_    = {} ) VULKAN_HPP_NOEXCEPT
      : aspectReferenceCount( aspectReferenceCount_ )
      , pAspectReferences( pAspectReferences_ )
    {}

    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(
      RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs )
      VULKAN_HPP_NOEXCEPT
      : RenderPassInputAttachmentAspectCreateInfo(
          *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassInputAttachmentAspectCreateInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const &
        aspectReferences_ )
      : aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) )
      , pAspectReferences( aspectReferences_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo &
                            operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassInputAttachmentAspectCreateInfo &
      operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
      return *this;
    }

    RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RenderPassInputAttachmentAspectCreateInfo &
      setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectReferenceCount = aspectReferenceCount_;
      return *this;
    }

    RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences(
      const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
    {
      pAspectReferences = pAspectReferences_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassInputAttachmentAspectCreateInfo & setAspectReferences(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const &
        aspectReferences_ ) VULKAN_HPP_NOEXCEPT
    {
      aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
      pAspectReferences    = aspectReferences_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo *>( this );
    }

    operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default;
#else
    bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) &&
             ( pAspectReferences == rhs.pAspectReferences );
    }

    bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
    const void *                        pNext = {};
    uint32_t                            aspectReferenceCount                       = {};
    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {};
  };
  static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) ==
                   sizeof( VkRenderPassInputAttachmentAspectCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassInputAttachmentAspectCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
  {
    using Type = RenderPassInputAttachmentAspectCreateInfo;
  };
  using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
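  // RenderPassInputAttachmentAspectCreateInfo extends VkRenderPassCreateInfo (Vulkan 1.1, promoted from
  // VK_KHR_maintenance2) and specifies which image aspects of an input attachment a subpass may access.
  // Illustrative sketch; renderPassCreateInfo is a placeholder:
  //   vk::InputAttachmentAspectReference aspectRef( 0, 0, vk::ImageAspectFlagBits::eDepth );
  //   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectRef );
  //   renderPassCreateInfo.setPNext( &aspectInfo );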

  struct RenderPassMultiviewCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t         subpassCount_         = {},
                                                        const uint32_t * pViewMasks_           = {},
                                                        uint32_t         dependencyCount_      = {},
                                                        const int32_t *  pViewOffsets_         = {},
                                                        uint32_t         correlationMaskCount_ = {},
                                                        const uint32_t * pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT
      : subpassCount( subpassCount_ )
      , pViewMasks( pViewMasks_ )
      , dependencyCount( dependencyCount_ )
      , pViewOffsets( pViewOffsets_ )
      , correlationMaskCount( correlationMaskCount_ )
      , pCorrelationMasks( pCorrelationMasks_ )
    {}

    VULKAN_HPP_CONSTEXPR
      RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassMultiviewCreateInfo( *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassMultiviewCreateInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const &  viewOffsets_      = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {} )
      : subpassCount( static_cast<uint32_t>( viewMasks_.size() ) )
      , pViewMasks( viewMasks_.data() )
      , dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) )
      , pViewOffsets( viewOffsets_.data() )
      , correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) )
      , pCorrelationMasks( correlationMasks_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo &
                            operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>( &rhs );
      return *this;
    }

    RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = subpassCount_;
      return *this;
    }

    RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewMasks = pViewMasks_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassMultiviewCreateInfo & setViewMasks(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = static_cast<uint32_t>( viewMasks_.size() );
      pViewMasks   = viewMasks_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = dependencyCount_;
      return *this;
    }

    RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewOffsets = pViewOffsets_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassMultiviewCreateInfo & setViewOffsets(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
      pViewOffsets    = viewOffsets_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
    {
      correlationMaskCount = correlationMaskCount_;
      return *this;
    }

    RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pCorrelationMasks = pCorrelationMasks_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassMultiviewCreateInfo & setCorrelationMasks(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
      pCorrelationMasks    = correlationMasks_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo *>( this );
    }

    operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default;
#else
    bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) &&
             ( pViewMasks == rhs.pViewMasks ) && ( dependencyCount == rhs.dependencyCount ) &&
             ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) &&
             ( pCorrelationMasks == rhs.pCorrelationMasks );
    }

    bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                = StructureType::eRenderPassMultiviewCreateInfo;
    const void *                        pNext                = {};
    uint32_t                            subpassCount         = {};
    const uint32_t *                    pViewMasks           = {};
    uint32_t                            dependencyCount      = {};
    const int32_t *                     pViewOffsets         = {};
    uint32_t                            correlationMaskCount = {};
    const uint32_t *                    pCorrelationMasks    = {};
  };
  static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassMultiviewCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
  {
    using Type = RenderPassMultiviewCreateInfo;
  };
  using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
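  // RenderPassMultiviewCreateInfo extends VkRenderPassCreateInfo (Vulkan 1.1, promoted from VK_KHR_multiview);
  // each element of pViewMasks is a bitmask of the views rendered by the corresponding subpass.
  // Sketch assuming enhanced mode; renderPassCreateInfo is a placeholder:
  //   std::vector<uint32_t> viewMasks = { 0x3 };  // subpass 0 renders to views 0 and 1
  //   vk::RenderPassMultiviewCreateInfo multiviewInfo( viewMasks );
  //   renderPassCreateInfo.setPNext( &multiviewInfo );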

  struct SubpassSampleLocationsEXT
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(
      uint32_t                                     subpassIndex_        = {},
      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : subpassIndex( subpassIndex_ )
      , sampleLocationsInfo( sampleLocationsInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassSampleLocationsEXT( *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT &
                            operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
      return *this;
    }

    SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassIndex = subpassIndex_;
      return *this;
    }

    SubpassSampleLocationsEXT & setSampleLocationsInfo(
      VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsInfo = sampleLocationsInfo_;
      return *this;
    }

    operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassSampleLocationsEXT *>( this );
    }

    operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassSampleLocationsEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassSampleLocationsEXT const & ) const = default;
#else
    bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
    }

    bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t                                     subpassIndex        = {};
    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
  };
  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value,
                 "struct wrapper is not a standard layout!" );

  struct RenderPassSampleLocationsBeginInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eRenderPassSampleLocationsBeginInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(
      uint32_t                                                   attachmentInitialSampleLocationsCount_ = {},
      const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_     = {},
      uint32_t                                                   postSubpassSampleLocationsCount_       = {},
      const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT *    pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
      : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
      , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
      , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
      , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
    {}

    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassSampleLocationsBeginInfoEXT(
          *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassSampleLocationsBeginInfoEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const &
        attachmentInitialSampleLocations_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const &
        postSubpassSampleLocations_ = {} )
      : attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) )
      , pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() )
      , postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) )
      , pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT &
                            operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassSampleLocationsBeginInfoEXT &
      operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
      return *this;
    }

    RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RenderPassSampleLocationsBeginInfoEXT &
      setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
      return *this;
    }

    RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations(
      const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ )
      VULKAN_HPP_NOEXCEPT
    {
      pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const &
        attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
      pAttachmentInitialSampleLocations     = attachmentInitialSampleLocations_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    RenderPassSampleLocationsBeginInfoEXT &
      setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
      return *this;
    }

    RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations(
      const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const &
        postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
      pPostSubpassSampleLocations     = postSubpassSampleLocations_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT *>( this );
    }

    operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default;
#else
    bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) &&
             ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) &&
             ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) &&
             ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
    }

    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
    const void *                        pNext = {};
    uint32_t                            attachmentInitialSampleLocationsCount                    = {};
    const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {};
    uint32_t                                                   postSubpassSampleLocationsCount   = {};
    const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT *    pPostSubpassSampleLocations       = {};
  };
  static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassSampleLocationsBeginInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
  {
    using Type = RenderPassSampleLocationsBeginInfoEXT;
  };
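  // RenderPassSampleLocationsBeginInfoEXT extends VkRenderPassBeginInfo (VK_EXT_sample_locations) and
  // overrides the sample locations used for attachments and subpasses within the render pass instance.
  // Sketch assuming enhanced mode; both vectors and renderPassBeginInfo are placeholders:
  //   vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsBegin( attachmentInitialSampleLocations,
  //                                                                   postSubpassSampleLocations );
  //   renderPassBeginInfo.setPNext( &sampleLocationsBegin );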

  struct RenderPassTransformBeginInfoQCOM
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ =
        VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity ) VULKAN_HPP_NOEXCEPT : transform( transform_ )
    {}

    VULKAN_HPP_CONSTEXPR
      RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassTransformBeginInfoQCOM( *reinterpret_cast<RenderPassTransformBeginInfoQCOM const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM &
                            operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const *>( &rhs );
      return *this;
    }

    RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    RenderPassTransformBeginInfoQCOM &
      setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }

    operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM *>( this );
    }

    operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default;
#else
    bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform );
    }

    bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::eRenderPassTransformBeginInfoQCOM;
    void *                                            pNext = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform =
      VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
  };
  static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassTransformBeginInfoQCOM>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
  {
    using Type = RenderPassTransformBeginInfoQCOM;
  };
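  // RenderPassTransformBeginInfoQCOM extends VkRenderPassBeginInfo (VK_QCOM_render_pass_transform) and
  // applies a surface pre-rotation transform to the render pass instance. Illustrative sketch;
  // renderPassBeginInfo is a placeholder:
  //   vk::RenderPassTransformBeginInfoQCOM transformInfo( vk::SurfaceTransformFlagBitsKHR::eRotate90 );
  //   renderPassBeginInfo.setPNext( &transformInfo );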

  struct SamplerCustomBorderColorCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eSamplerCustomBorderColorCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    SamplerCustomBorderColorCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {},
      VULKAN_HPP_NAMESPACE::Format          format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
      : customBorderColor( customBorderColor_ )
      , format( format_ )
    {}

    SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerCustomBorderColorCreateInfoEXT(
          *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    SamplerCustomBorderColorCreateInfoEXT &
      operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerCustomBorderColorCreateInfoEXT &
      operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
      return *this;
    }

    SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SamplerCustomBorderColorCreateInfoEXT &
      setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
    {
      customBorderColor = customBorderColor_;
      return *this;
    }

    SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }

    operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT *>( this );
    }

    operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT *>( this );
    }

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType             = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
    const void *                          pNext             = {};
    VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
    VULKAN_HPP_NAMESPACE::Format          format            = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  };
  static_assert( sizeof( SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerCustomBorderColorCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
  {
    using Type = SamplerCustomBorderColorCreateInfoEXT;
  };
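  // SamplerCustomBorderColorCreateInfoEXT extends VkSamplerCreateInfo (VK_EXT_custom_border_color) and is
  // required when the sampler's borderColor is eFloatCustomEXT or eIntCustomEXT. Illustrative sketch;
  // samplerCreateInfo is a placeholder vk::SamplerCreateInfo:
  //   vk::SamplerCustomBorderColorCreateInfoEXT customBorder(
  //     vk::ClearColorValue( std::array<float, 4>{ 1.0f, 0.0f, 0.0f, 1.0f } ), vk::Format::eR8G8B8A8Unorm );
  //   samplerCreateInfo.setBorderColor( vk::BorderColor::eFloatCustomEXT ).setPNext( &customBorder );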

  struct SamplerReductionModeCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(
      VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ =
        VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage ) VULKAN_HPP_NOEXCEPT
      : reductionMode( reductionMode_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerReductionModeCreateInfo( *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo &
                            operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>( &rhs );
      return *this;
    }

    SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SamplerReductionModeCreateInfo &
      setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      reductionMode = reductionMode_;
      return *this;
    }

    operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerReductionModeCreateInfo *>( this );
    }

    operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerReductionModeCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default;
#else
    bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode );
    }

    bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType = StructureType::eSamplerReductionModeCreateInfo;
    const void *                               pNext = {};
    VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode =
      VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
  };
  static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerReductionModeCreateInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
  {
    using Type = SamplerReductionModeCreateInfo;
  };
  using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
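
  // Illustrative, non-normative usage sketch (assumes the default `vk` namespace and a `device` handle
  // created elsewhere): chaining a SamplerReductionModeCreateInfo into SamplerCreateInfo::pNext requests
  // min/max filtering instead of the default weighted average.
  //
  //   vk::SamplerReductionModeCreateInfo reductionInfo( vk::SamplerReductionMode::eMax );
  //   vk::SamplerCreateInfo              samplerCreateInfo;
  //   samplerCreateInfo.pNext = &reductionInfo;
  //   vk::Sampler maxSampler  = device.createSampler( samplerCreateInfo );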

  struct SamplerYcbcrConversionImageFormatProperties
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eSamplerYcbcrConversionImageFormatProperties;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(
      uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
      : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
    {}

    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(
      SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs )
      VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionImageFormatProperties(
          *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionImageFormatProperties &
                            operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionImageFormatProperties &
      operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
      return *this;
    }

    operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties *>( this );
    }

    operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default;
#else
    bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
    }

    bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
    void *                              pNext = {};
    uint32_t                            combinedImageSamplerDescriptorCount = {};
  };
  static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) ==
                   sizeof( VkSamplerYcbcrConversionImageFormatProperties ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerYcbcrConversionImageFormatProperties>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
  {
    using Type = SamplerYcbcrConversionImageFormatProperties;
  };
  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;

  struct SamplerYcbcrConversionInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT
      : conversion( conversion_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo &
                            operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
      return *this;
    }

    SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SamplerYcbcrConversionInfo &
      setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
    {
      conversion = conversion_;
      return *this;
    }

    operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo *>( this );
    }

    operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default;
#else
    bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion );
    }

    bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType      = StructureType::eSamplerYcbcrConversionInfo;
    const void *                                 pNext      = {};
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
  };
  static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerYcbcrConversionInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
  {
    using Type = SamplerYcbcrConversionInfo;
  };
  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
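
  // Illustrative, non-normative usage sketch (assumes the default `vk` namespace and a
  // `ycbcrConversion` handle obtained from Device::createSamplerYcbcrConversion): the same
  // SamplerYcbcrConversionInfo is chained into both the SamplerCreateInfo and the ImageViewCreateInfo
  // that use the conversion.
  //
  //   vk::SamplerYcbcrConversionInfo conversionInfo( ycbcrConversion );
  //   vk::SamplerCreateInfo samplerCreateInfo;
  //   samplerCreateInfo.pNext = &conversionInfo;
  //   vk::ImageViewCreateInfo viewCreateInfo;
  //   viewCreateInfo.pNext    = &conversionInfo;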

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  struct ScreenSurfaceCreateInfoQNX
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_   = {},
                                                     struct _screen_context *                          context_ = {},
                                                     struct _screen_window * window_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , context( context_ )
      , window( window_ )
    {}

    VULKAN_HPP_CONSTEXPR
      ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
      : ScreenSurfaceCreateInfoQNX( *reinterpret_cast<ScreenSurfaceCreateInfoQNX const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX &
                            operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const *>( &rhs );
      return *this;
    }

    ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ScreenSurfaceCreateInfoQNX &
      setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT
    {
      context = context_;
      return *this;
    }

    ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }

    operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( this );
    }

    operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkScreenSurfaceCreateInfoQNX *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default;
#  else
    bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) &&
             ( window == rhs.window );
    }

    bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType   = StructureType::eScreenSurfaceCreateInfoQNX;
    const void *                                      pNext   = {};
    VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags   = {};
    struct _screen_context *                          context = {};
    struct _screen_window *                           window  = {};
  };
  static_assert( sizeof( ScreenSurfaceCreateInfoQNX ) == sizeof( VkScreenSurfaceCreateInfoQNX ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ScreenSurfaceCreateInfoQNX>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eScreenSurfaceCreateInfoQNX>
  {
    using Type = ScreenSurfaceCreateInfoQNX;
  };
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

  struct SemaphoreTypeCreateInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(
      VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary,
      uint64_t                            initialValue_  = {} ) VULKAN_HPP_NOEXCEPT
      : semaphoreType( semaphoreType_ )
      , initialValue( initialValue_ )
    {}

    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreTypeCreateInfo( *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo &
                            operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>( &rhs );
      return *this;
    }

    SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreType = semaphoreType_;
      return *this;
    }

    SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
    {
      initialValue = initialValue_;
      return *this;
    }

    operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreTypeCreateInfo *>( this );
    }

    operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreTypeCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default;
#else
    bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) &&
             ( initialValue == rhs.initialValue );
    }

    bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eSemaphoreTypeCreateInfo;
    const void *                        pNext         = {};
    VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
    uint64_t                            initialValue  = {};
  };
  static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
  {
    using Type = SemaphoreTypeCreateInfo;
  };
  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
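
  // Illustrative, non-normative usage sketch (assumes the default `vk` namespace and a `device` handle
  // created elsewhere): a SemaphoreTypeCreateInfo chained into SemaphoreCreateInfo::pNext turns the
  // created semaphore into a timeline semaphore starting at the given initial value.
  //
  //   vk::SemaphoreTypeCreateInfo typeCreateInfo( vk::SemaphoreType::eTimeline, 0 );
  //   vk::SemaphoreCreateInfo     semaphoreCreateInfo;
  //   semaphoreCreateInfo.pNext       = &typeCreateInfo;
  //   vk::Semaphore timelineSemaphore = device.createSemaphore( semaphoreCreateInfo );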

  struct SetStateFlagsIndirectCommandNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) {}

    VULKAN_HPP_CONSTEXPR
      SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV &
                            operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
      return *this;
    }

    SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }

    operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV *>( this );
    }

    operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default;
#else
    bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( data == rhs.data );
    }

    bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t data = {};
  };
  static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SetStateFlagsIndirectCommandNV>::value,
                 "struct wrapper is not a standard layout!" );

  struct ShaderModuleValidationCacheCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eShaderModuleValidationCacheCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT
      : validationCache( validationCache_ )
    {}

    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(
      ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : ShaderModuleValidationCacheCreateInfoEXT(
          *reinterpret_cast<ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT &
                            operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShaderModuleValidationCacheCreateInfoEXT &
      operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
      return *this;
    }

    ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ShaderModuleValidationCacheCreateInfoEXT &
      setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
    {
      validationCache = validationCache_;
      return *this;
    }

    operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT *>( this );
    }

    operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default;
#else
    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache );
    }

    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType           = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
    const void *                             pNext           = {};
    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
  };
  static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) ==
                   sizeof( VkShaderModuleValidationCacheCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderModuleValidationCacheCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eShaderModuleValidationCacheCreateInfoEXT>
  {
    using Type = ShaderModuleValidationCacheCreateInfoEXT;
  };

  struct ShaderResourceUsageAMD
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_             = {},
                                                 uint32_t numUsedSgprs_             = {},
                                                 uint32_t ldsSizePerLocalWorkGroup_ = {},
                                                 size_t   ldsUsageSizeInBytes_      = {},
                                                 size_t   scratchMemUsageInBytes_   = {} ) VULKAN_HPP_NOEXCEPT
      : numUsedVgprs( numUsedVgprs_ )
      , numUsedSgprs( numUsedSgprs_ )
      , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ )
      , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ )
      , scratchMemUsageInBytes( scratchMemUsageInBytes_ )
    {}

    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderResourceUsageAMD( *reinterpret_cast<ShaderResourceUsageAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ShaderResourceUsageAMD &
                            operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>( &rhs );
      return *this;
    }

    operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderResourceUsageAMD *>( this );
    }

    operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderResourceUsageAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ShaderResourceUsageAMD const & ) const = default;
#else
    bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) &&
             ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) &&
             ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) &&
             ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
    }

    bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t numUsedVgprs             = {};
    uint32_t numUsedSgprs             = {};
    uint32_t ldsSizePerLocalWorkGroup = {};
    size_t   ldsUsageSizeInBytes      = {};
    size_t   scratchMemUsageInBytes   = {};
  };
  static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );

  struct ShaderStatisticsInfoAMD
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14
      ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags       shaderStageMask_   = {},
                               VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_     = {},
                               uint32_t                                     numPhysicalVgprs_  = {},
                               uint32_t                                     numPhysicalSgprs_  = {},
                               uint32_t                                     numAvailableVgprs_ = {},
                               uint32_t                                     numAvailableSgprs_ = {},
                               std::array<uint32_t, 3> const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
      : shaderStageMask( shaderStageMask_ )
      , resourceUsage( resourceUsage_ )
      , numPhysicalVgprs( numPhysicalVgprs_ )
      , numPhysicalSgprs( numPhysicalSgprs_ )
      , numAvailableVgprs( numAvailableVgprs_ )
      , numAvailableSgprs( numAvailableSgprs_ )
      , computeWorkGroupSize( computeWorkGroupSize_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderStatisticsInfoAMD( *reinterpret_cast<ShaderStatisticsInfoAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD &
                            operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>( &rhs );
      return *this;
    }

    operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderStatisticsInfoAMD *>( this );
    }

    operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderStatisticsInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default;
#else
    bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) &&
             ( numPhysicalVgprs == rhs.numPhysicalVgprs ) && ( numPhysicalSgprs == rhs.numPhysicalSgprs ) &&
             ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) &&
             ( computeWorkGroupSize == rhs.computeWorkGroupSize );
    }

    bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::ShaderStageFlags            shaderStageMask      = {};
    VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD      resourceUsage        = {};
    uint32_t                                          numPhysicalVgprs     = {};
    uint32_t                                          numPhysicalSgprs     = {};
    uint32_t                                          numAvailableVgprs    = {};
    uint32_t                                          numAvailableSgprs    = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
  };
  static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );

  struct SharedPresentSurfaceCapabilitiesKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eSharedPresentSurfaceCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(
      VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
      : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
    {}

    VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast<SharedPresentSurfaceCapabilitiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SharedPresentSurfaceCapabilitiesKHR &
                            operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SharedPresentSurfaceCapabilitiesKHR &
      operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR *>( this );
    }

    operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default;
#else
    bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
    }

    bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
    void *                                pNext = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
  };
  static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SharedPresentSurfaceCapabilitiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
  {
    using Type = SharedPresentSurfaceCapabilitiesKHR;
  };

#if defined( VK_USE_PLATFORM_GGP )
  struct StreamDescriptorSurfaceCreateInfoGGP
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eStreamDescriptorSurfaceCreateInfoGGP;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {},
                                            GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , streamDescriptor( streamDescriptor_ )
    {}

    VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
      : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast<StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP &
                            operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    StreamDescriptorSurfaceCreateInfoGGP &
      operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs );
      return *this;
    }

    StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    StreamDescriptorSurfaceCreateInfoGGP &
      setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    StreamDescriptorSurfaceCreateInfoGGP &
      setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      streamDescriptor = streamDescriptor_;
      return *this;
    }

    operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( this );
    }

    operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & ) const = default;
#  else
    bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
    }

    bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
    const void *                        pNext = {};
    VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags            = {};
    GgpStreamDescriptor                                         streamDescriptor = {};
  };
  static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eStreamDescriptorSurfaceCreateInfoGGP>
  {
    using Type = StreamDescriptorSurfaceCreateInfoGGP;
  };
#endif /*VK_USE_PLATFORM_GGP*/

  struct SubpassDescriptionDepthStencilResolve
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eSubpassDescriptionDepthStencilResolve;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(
      VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_   = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
      VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone,
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
      : depthResolveMode( depthResolveMode_ )
      , stencilResolveMode( stencilResolveMode_ )
      , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
    {}

    VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassDescriptionDepthStencilResolve(
          *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve &
                            operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SubpassDescriptionDepthStencilResolve &
      operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>( &rhs );
      return *this;
    }

    SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SubpassDescriptionDepthStencilResolve &
      setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
    {
      depthResolveMode = depthResolveMode_;
      return *this;
    }

    SubpassDescriptionDepthStencilResolve &
      setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
    {
      stencilResolveMode = stencilResolveMode_;
      return *this;
    }

    SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment(
      const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
    {
      pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
      return *this;
    }

    operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve *>( this );
    }

    operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default;
#else
    bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) &&
             ( stencilResolveMode == rhs.stencilResolveMode ) &&
             ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
    }

    bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType            = StructureType::eSubpassDescriptionDepthStencilResolve;
    const void *                              pNext            = {};
    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
    VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
    const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {};
  };
  static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubpassDescriptionDepthStencilResolve>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
  {
    using Type = SubpassDescriptionDepthStencilResolve;
  };
  using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
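
  // Illustrative, non-normative usage sketch (assumes the default `vk` namespace and a
  // `subpassDescription2` of type vk::SubpassDescription2 being filled in elsewhere): a
  // SubpassDescriptionDepthStencilResolve is chained into SubpassDescription2::pNext to resolve a
  // multisampled depth/stencil attachment into the referenced single-sample attachment.
  //
  //   vk::AttachmentReference2 depthStencilResolveRef( 2, vk::ImageLayout::eDepthStencilAttachmentOptimal );
  //   vk::SubpassDescriptionDepthStencilResolve depthStencilResolve;
  //   depthStencilResolve.setDepthResolveMode( vk::ResolveModeFlagBits::eSampleZero )
  //     .setStencilResolveMode( vk::ResolveModeFlagBits::eSampleZero )
  //     .setPDepthStencilResolveAttachment( &depthStencilResolveRef );
  //   subpassDescription2.pNext = &depthStencilResolve;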

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct SurfaceCapabilitiesFullScreenExclusiveEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(
      VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT
      : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
    {}

    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(
      SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : SurfaceCapabilitiesFullScreenExclusiveEXT(
          *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT &
                            operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceCapabilitiesFullScreenExclusiveEXT &
      operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
      return *this;
    }

    SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SurfaceCapabilitiesFullScreenExclusiveEXT &
      setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
    {
      fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
      return *this;
    }

    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
    }

    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default;
#  else
    bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
    }

    bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        fullScreenExclusiveSupported = {};
  };
  static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) ==
                   sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceCapabilitiesFullScreenExclusiveEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
  {
    using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct SurfaceFullScreenExclusiveInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eSurfaceFullScreenExclusiveInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ =
                                           VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT
      : fullScreenExclusive( fullScreenExclusive_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT &
                            operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>( &rhs );
      return *this;
    }

    SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SurfaceFullScreenExclusiveInfoEXT &
      setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
    {
      fullScreenExclusive = fullScreenExclusive_;
      return *this;
    }

    operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT *>( this );
    }

    operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default;
#  else
    bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive );
    }

    bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
    void *                                       pNext = {};
    VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive =
      VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
  };
  static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveInfoEXT>
  {
    using Type = SurfaceFullScreenExclusiveInfoEXT;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct SurfaceFullScreenExclusiveWin32InfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {} ) VULKAN_HPP_NOEXCEPT
      : hmonitor( hmonitor_ )
    {}

    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceFullScreenExclusiveWin32InfoEXT(
          *reinterpret_cast<SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT &
                            operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceFullScreenExclusiveWin32InfoEXT &
      operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs );
      return *this;
    }

    SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
    {
      hmonitor = hmonitor_;
      return *this;
    }

    operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT *>( this );
    }

    operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default;
#  else
    bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor );
    }

    bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
    const void *                        pNext    = {};
    HMONITOR                            hmonitor = {};
  };
  static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveWin32InfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT>
  {
    using Type = SurfaceFullScreenExclusiveWin32InfoEXT;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

  struct SurfaceProtectedCapabilitiesKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT
      : supportsProtected( supportsProtected_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast<SurfaceProtectedCapabilitiesKHR const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR &
                            operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>( &rhs );
      return *this;
    }

    SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SurfaceProtectedCapabilitiesKHR &
      setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
    {
      supportsProtected = supportsProtected_;
      return *this;
    }

    operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR *>( this );
    }

    operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default;
#else
    bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected );
    }

    bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType             = StructureType::eSurfaceProtectedCapabilitiesKHR;
    const void *                        pNext             = {};
    VULKAN_HPP_NAMESPACE::Bool32        supportsProtected = {};
  };
  static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SurfaceProtectedCapabilitiesKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSurfaceProtectedCapabilitiesKHR>
  {
    using Type = SurfaceProtectedCapabilitiesKHR;
  };
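
  // Illustrative, non-normative usage sketch (assumes the default `vk` namespace, a `physicalDevice`
  // handle, and a filled-in vk::PhysicalDeviceSurfaceInfo2KHR named `surfaceInfo`):
  // SurfaceProtectedCapabilitiesKHR is a returned structure, queried by chaining it behind
  // SurfaceCapabilities2KHR via a StructureChain.
  //
  //   auto capabilities =
  //     physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR, vk::SurfaceProtectedCapabilitiesKHR>(
  //       surfaceInfo );
  //   bool protectedSupported =
  //     capabilities.get<vk::SurfaceProtectedCapabilitiesKHR>().supportsProtected == VK_TRUE;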

  struct SwapchainCounterCreateInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
      : surfaceCounters( surfaceCounters_ )
    {}

    VULKAN_HPP_CONSTEXPR
      SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainCounterCreateInfoEXT( *reinterpret_cast<SwapchainCounterCreateInfoEXT const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT &
                            operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>( &rhs );
      return *this;
    }

    SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SwapchainCounterCreateInfoEXT &
      setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
    {
      surfaceCounters = surfaceCounters_;
      return *this;
    }

    operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT *>( this );
    }

    operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default;
#else
    bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters );
    }

    bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType           = StructureType::eSwapchainCounterCreateInfoEXT;
    const void *                                 pNext           = {};
    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
  };
  static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SwapchainCounterCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
  {
    using Type = SwapchainCounterCreateInfoEXT;
  };
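
  // Illustrative usage sketch (comment only): SwapchainCounterCreateInfoEXT is chained into the
  // pNext of a SwapchainCreateInfoKHR to request surface counters; swapchainCreateInfo and the
  // requested counter bits are assumed to be provided by the caller.
  //
  //   VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT requestedCounters = {};  // fill with counter bits supported by the surface
  //   VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT counterInfo( requestedCounters );
  //   swapchainCreateInfo.setPNext( &counterInfo );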

  struct SwapchainDisplayNativeHdrCreateInfoAMD
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} )
      VULKAN_HPP_NOEXCEPT : localDimmingEnable( localDimmingEnable_ )
    {}

    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : SwapchainDisplayNativeHdrCreateInfoAMD(
          *reinterpret_cast<SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD &
                            operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    SwapchainDisplayNativeHdrCreateInfoAMD &
      operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs );
      return *this;
    }

    SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    SwapchainDisplayNativeHdrCreateInfoAMD &
      setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      localDimmingEnable = localDimmingEnable_;
      return *this;
    }

    operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD *>( this );
    }

    operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default;
#else
    bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable );
    }

    bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType              = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
    const void *                        pNext              = {};
    VULKAN_HPP_NAMESPACE::Bool32        localDimmingEnable = {};
  };
  static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SwapchainDisplayNativeHdrCreateInfoAMD>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD>
  {
    using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
  };

  struct TextureLODGatherFormatPropertiesAMD
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eTextureLodGatherFormatPropertiesAMD;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(
      VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT
      : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
    {}

    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast<TextureLODGatherFormatPropertiesAMD const *>( &rhs ) )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 TextureLODGatherFormatPropertiesAMD &
                            operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TextureLODGatherFormatPropertiesAMD &
      operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>( &rhs );
      return *this;
    }

    operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD *>( this );
    }

    operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default;
#else
    bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
    }

    bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
    void *                              pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32        supportsTextureGatherLODBiasAMD = {};
  };
  static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<TextureLODGatherFormatPropertiesAMD>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
  {
    using Type = TextureLODGatherFormatPropertiesAMD;
  };

  struct TimelineSemaphoreSubmitInfo
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      TimelineSemaphoreSubmitInfo( uint32_t         waitSemaphoreValueCount_   = {},
                                   const uint64_t * pWaitSemaphoreValues_      = {},
                                   uint32_t         signalSemaphoreValueCount_ = {},
                                   const uint64_t * pSignalSemaphoreValues_    = {} ) VULKAN_HPP_NOEXCEPT
      : waitSemaphoreValueCount( waitSemaphoreValueCount_ )
      , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
      , signalSemaphoreValueCount( signalSemaphoreValueCount_ )
      , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
    {}

    VULKAN_HPP_CONSTEXPR
      TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : TimelineSemaphoreSubmitInfo( *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    TimelineSemaphoreSubmitInfo(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
      : waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) )
      , pWaitSemaphoreValues( waitSemaphoreValues_.data() )
      , signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) )
      , pSignalSemaphoreValues( signalSemaphoreValues_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo &
                            operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>( &rhs );
      return *this;
    }

    TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreValueCount = waitSemaphoreValueCount_;
      return *this;
    }

    TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pWaitSemaphoreValues = pWaitSemaphoreValues_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
      pWaitSemaphoreValues    = waitSemaphoreValues_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    TimelineSemaphoreSubmitInfo &
      setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreValueCount = signalSemaphoreValueCount_;
      return *this;
    }

    TimelineSemaphoreSubmitInfo &
      setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      pSignalSemaphoreValues = pSignalSemaphoreValues_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
    {
      signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
      pSignalSemaphoreValues    = signalSemaphoreValues_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo *>( this );
    }

    operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default;
#else
    bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) &&
             ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) &&
             ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) &&
             ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
    }

    bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType                     = StructureType::eTimelineSemaphoreSubmitInfo;
    const void *                        pNext                     = {};
    uint32_t                            waitSemaphoreValueCount   = {};
    const uint64_t *                    pWaitSemaphoreValues      = {};
    uint32_t                            signalSemaphoreValueCount = {};
    const uint64_t *                    pSignalSemaphoreValues    = {};
  };
  static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<TimelineSemaphoreSubmitInfo>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
  {
    using Type = TimelineSemaphoreSubmitInfo;
  };
  using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
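
  // Illustrative usage sketch (comment only): TimelineSemaphoreSubmitInfo is chained into the pNext
  // of a SubmitInfo to supply timeline values; submitInfo is assumed to be prepared by the caller,
  // and the value arrays must outlive the submission. The ArrayProxy constructor requires enhanced mode.
  //
  //   uint64_t waitValues[]   = { 1 };
  //   uint64_t signalValues[] = { 2 };
  //   VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo timelineInfo( waitValues, signalValues );
  //   submitInfo.setPNext( &timelineInfo );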

  struct TraceRaysIndirectCommandKHR
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_  = {},
                                                      uint32_t height_ = {},
                                                      uint32_t depth_  = {} ) VULKAN_HPP_NOEXCEPT
      : width( width_ )
      , height( height_ )
      , depth( depth_ )
    {}

    VULKAN_HPP_CONSTEXPR
      TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : TraceRaysIndirectCommandKHR( *reinterpret_cast<TraceRaysIndirectCommandKHR const *>( &rhs ) )
    {}

    explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} )
      : width( extent2D.width ), height( extent2D.height ), depth( depth_ )
    {}
#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR &
                            operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const *>( &rhs );
      return *this;
    }

    TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
    {
      width = width_;
      return *this;
    }

    TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
    {
      height = height_;
      return *this;
    }

    TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
    {
      depth = depth_;
      return *this;
    }

    operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR *>( this );
    }

    operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkTraceRaysIndirectCommandKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default;
#else
    bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth );
    }

    bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    uint32_t width  = {};
    uint32_t height = {};
    uint32_t depth  = {};
  };
  static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<TraceRaysIndirectCommandKHR>::value,
                 "struct wrapper is not a standard layout!" );

  struct ValidationFeaturesEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(
      uint32_t                                                  enabledValidationFeatureCount_  = {},
      const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT *  pEnabledValidationFeatures_     = {},
      uint32_t                                                  disabledValidationFeatureCount_ = {},
      const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
      : enabledValidationFeatureCount( enabledValidationFeatureCount_ )
      , pEnabledValidationFeatures( pEnabledValidationFeatures_ )
      , disabledValidationFeatureCount( disabledValidationFeatureCount_ )
      , pDisabledValidationFeatures( pDisabledValidationFeatures_ )
    {}

    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ValidationFeaturesEXT( *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ValidationFeaturesEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const &
        enabledValidationFeatures_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const &
        disabledValidationFeatures_ = {} )
      : enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) )
      , pEnabledValidationFeatures( enabledValidationFeatures_.data() )
      , disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) )
      , pDisabledValidationFeatures( disabledValidationFeatures_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT &
                            operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>( &rhs );
      return *this;
    }

    ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ValidationFeaturesEXT &
      setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledValidationFeatureCount = enabledValidationFeatureCount_;
      return *this;
    }

    ValidationFeaturesEXT & setPEnabledValidationFeatures(
      const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      pEnabledValidationFeatures = pEnabledValidationFeatures_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ValidationFeaturesEXT & setEnabledValidationFeatures(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const &
        enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
      pEnabledValidationFeatures    = enabledValidationFeatures_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    ValidationFeaturesEXT &
      setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      disabledValidationFeatureCount = disabledValidationFeatureCount_;
      return *this;
    }

    ValidationFeaturesEXT & setPDisabledValidationFeatures(
      const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      pDisabledValidationFeatures = pDisabledValidationFeatures_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ValidationFeaturesEXT & setDisabledValidationFeatures(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const &
        disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
    {
      disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
      pDisabledValidationFeatures    = disabledValidationFeatures_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkValidationFeaturesEXT *>( this );
    }

    operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkValidationFeaturesEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ValidationFeaturesEXT const & ) const = default;
#else
    bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) &&
             ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) &&
             ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) &&
             ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
    }

    bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                       sType = StructureType::eValidationFeaturesEXT;
    const void *                                              pNext = {};
    uint32_t                                                  enabledValidationFeatureCount  = {};
    const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT *  pEnabledValidationFeatures     = {};
    uint32_t                                                  disabledValidationFeatureCount = {};
    const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures    = {};
  };
  static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
  {
    using Type = ValidationFeaturesEXT;
  };
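
  // Illustrative usage sketch (comment only): ValidationFeaturesEXT is chained into the pNext of an
  // InstanceCreateInfo to toggle extra validation-layer features; instanceCreateInfo and the chosen
  // enable values are assumed to be provided by the caller. The ArrayProxy constructor requires enhanced mode.
  //
  //   std::array<VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT, 1> enables = {};  // fill with the desired enable values
  //   VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT validationFeatures( enables );
  //   instanceCreateInfo.setPNext( &validationFeatures );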

  struct ValidationFlagsEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(
      uint32_t                                         disabledValidationCheckCount_ = {},
      const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_    = {} ) VULKAN_HPP_NOEXCEPT
      : disabledValidationCheckCount( disabledValidationCheckCount_ )
      , pDisabledValidationChecks( pDisabledValidationChecks_ )
    {}

    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ValidationFlagsEXT( *reinterpret_cast<ValidationFlagsEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ValidationFlagsEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const &
        disabledValidationChecks_ )
      : disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) )
      , pDisabledValidationChecks( disabledValidationChecks_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT &
                            operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>( &rhs );
      return *this;
    }

    ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
    {
      disabledValidationCheckCount = disabledValidationCheckCount_;
      return *this;
    }

    ValidationFlagsEXT & setPDisabledValidationChecks(
      const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
    {
      pDisabledValidationChecks = pDisabledValidationChecks_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    ValidationFlagsEXT & setDisabledValidationChecks(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const &
        disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
    {
      disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
      pDisabledValidationChecks    = disabledValidationChecks_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkValidationFlagsEXT *>( this );
    }

    operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkValidationFlagsEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ValidationFlagsEXT const & ) const = default;
#else
    bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) &&
             ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
    }

    bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType                        = StructureType::eValidationFlagsEXT;
    const void *                                     pNext                        = {};
    uint32_t                                         disabledValidationCheckCount = {};
    const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks    = {};
  };
  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eValidationFlagsEXT>
  {
    using Type = ValidationFlagsEXT;
  };

#if defined( VK_USE_PLATFORM_VI_NN )
  struct ViSurfaceCreateInfoNN
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {},
                                                void * window_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , window( window_ )
    {}

    VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
      : ViSurfaceCreateInfoNN( *reinterpret_cast<ViSurfaceCreateInfoNN const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN &
                            operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>( &rhs );
      return *this;
    }

    ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }

    operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkViSurfaceCreateInfoNN *>( this );
    }

    operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkViSurfaceCreateInfoNN *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default;
#  else
    bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window );
    }

    bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType          sType  = StructureType::eViSurfaceCreateInfoNN;
    const void *                                 pNext  = {};
    VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags  = {};
    void *                                       window = {};
  };
  static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>
  {
    using Type = ViSurfaceCreateInfoNN;
  };
#endif /*VK_USE_PLATFORM_VI_NN*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH264CapabilitiesEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT(
      uint32_t                                  maxLevel_               = {},
      VULKAN_HPP_NAMESPACE::Offset2D            fieldOffsetGranularity_ = {},
      VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_    = {} ) VULKAN_HPP_NOEXCEPT
      : maxLevel( maxLevel_ )
      , fieldOffsetGranularity( fieldOffsetGranularity_ )
      , stdExtensionVersion( stdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      VideoDecodeH264CapabilitiesEXT( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264CapabilitiesEXT( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264CapabilitiesEXT( *reinterpret_cast<VideoDecodeH264CapabilitiesEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264CapabilitiesEXT &
                            operator=( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264CapabilitiesEXT & operator=( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const *>( &rhs );
      return *this;
    }

    operator VkVideoDecodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264CapabilitiesEXT *>( this );
    }

    operator VkVideoDecodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264CapabilitiesEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264CapabilitiesEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxLevel == rhs.maxLevel ) &&
             ( fieldOffsetGranularity == rhs.fieldOffsetGranularity ) &&
             ( stdExtensionVersion == rhs.stdExtensionVersion );
    }

    bool operator!=( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType                  = StructureType::eVideoDecodeH264CapabilitiesEXT;
    void *                                    pNext                  = {};
    uint32_t                                  maxLevel               = {};
    VULKAN_HPP_NAMESPACE::Offset2D            fieldOffsetGranularity = {};
    VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion    = {};
  };
  static_assert( sizeof( VideoDecodeH264CapabilitiesEXT ) == sizeof( VkVideoDecodeH264CapabilitiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH264CapabilitiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264CapabilitiesEXT>
  {
    using Type = VideoDecodeH264CapabilitiesEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH264DpbSlotInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT(
      const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : pStdReferenceInfo( pStdReferenceInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH264DpbSlotInfoEXT( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264DpbSlotInfoEXT( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoDecodeH264DpbSlotInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT &
                            operator=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264DpbSlotInfoEXT & operator=( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH264DpbSlotInfoEXT &
      setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }

    operator VkVideoDecodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264DpbSlotInfoEXT *>( this );
    }

    operator VkVideoDecodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264DpbSlotInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264DpbSlotInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
    }

    bool operator!=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType     sType             = StructureType::eVideoDecodeH264DpbSlotInfoEXT;
    const void *                            pNext             = {};
    const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {};
  };
  static_assert( sizeof( VideoDecodeH264DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH264DpbSlotInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH264DpbSlotInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264DpbSlotInfoEXT>
  {
    using Type = VideoDecodeH264DpbSlotInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH264MvcEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264MvcEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( const StdVideoDecodeH264Mvc * pStdMvc_ = {} ) VULKAN_HPP_NOEXCEPT
      : pStdMvc( pStdMvc_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcEXT( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264MvcEXT( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264MvcEXT( *reinterpret_cast<VideoDecodeH264MvcEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcEXT &
                            operator=( VideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264MvcEXT & operator=( VkVideoDecodeH264MvcEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH264MvcEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH264MvcEXT & setPStdMvc( const StdVideoDecodeH264Mvc * pStdMvc_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdMvc = pStdMvc_;
      return *this;
    }

    operator VkVideoDecodeH264MvcEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264MvcEXT *>( this );
    }

    operator VkVideoDecodeH264MvcEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264MvcEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264MvcEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdMvc == rhs.pStdMvc );
    }

    bool operator!=( VideoDecodeH264MvcEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType   = StructureType::eVideoDecodeH264MvcEXT;
    const void *                        pNext   = {};
    const StdVideoDecodeH264Mvc *       pStdMvc = {};
  };
  static_assert( sizeof( VideoDecodeH264MvcEXT ) == sizeof( VkVideoDecodeH264MvcEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH264MvcEXT>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264MvcEXT>
  {
    using Type = VideoDecodeH264MvcEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH264PictureInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {},
                                                        uint32_t                              slicesCount_     = {},
                                                        const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
      : pStdPictureInfo( pStdPictureInfo_ )
      , slicesCount( slicesCount_ )
      , pSlicesDataOffsets( pSlicesDataOffsets_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH264PictureInfoEXT( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264PictureInfoEXT( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264PictureInfoEXT( *reinterpret_cast<VideoDecodeH264PictureInfoEXT const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH264PictureInfoEXT(
      const StdVideoDecodeH264PictureInfo *                                 pStdPictureInfo_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ )
      : pStdPictureInfo( pStdPictureInfo_ )
      , slicesCount( static_cast<uint32_t>( slicesDataOffsets_.size() ) )
      , pSlicesDataOffsets( slicesDataOffsets_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT &
                            operator=( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264PictureInfoEXT & operator=( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH264PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH264PictureInfoEXT &
      setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }

    VideoDecodeH264PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      slicesCount = slicesCount_;
      return *this;
    }

    VideoDecodeH264PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pSlicesDataOffsets = pSlicesDataOffsets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH264PictureInfoEXT & setSlicesDataOffsets(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      slicesCount        = static_cast<uint32_t>( slicesDataOffsets_.size() );
      pSlicesDataOffsets = slicesDataOffsets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkVideoDecodeH264PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264PictureInfoEXT *>( this );
    }

    operator VkVideoDecodeH264PictureInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264PictureInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264PictureInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) &&
             ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == rhs.pSlicesDataOffsets );
    }

    bool operator!=( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType              = StructureType::eVideoDecodeH264PictureInfoEXT;
    const void *                          pNext              = {};
    const StdVideoDecodeH264PictureInfo * pStdPictureInfo    = {};
    uint32_t                              slicesCount        = {};
    const uint32_t *                      pSlicesDataOffsets = {};
  };
  static_assert( sizeof( VideoDecodeH264PictureInfoEXT ) == sizeof( VkVideoDecodeH264PictureInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH264PictureInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264PictureInfoEXT>
  {
    using Type = VideoDecodeH264PictureInfoEXT;
  };
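
  // Illustrative usage sketch (comment only, provisional beta API): per-picture H.264 decode
  // parameters come from the Video Std headers; stdPictureInfo is assumed to be filled in by the
  // application's bitstream parser. The ArrayProxy constructor requires enhanced mode.
  //
  //   StdVideoDecodeH264PictureInfo stdPictureInfo = {};
  //   uint32_t                      sliceOffsets[] = { 0 };
  //   VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT pictureInfo( &stdPictureInfo, sliceOffsets );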
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH264ProfileEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileEXT(
      StdVideoH264ProfileIdc                                   stdProfileIdc_ = {},
      VULKAN_HPP_NAMESPACE::VideoDecodeH264FieldLayoutFlagsEXT fieldLayout_   = {} ) VULKAN_HPP_NOEXCEPT
      : stdProfileIdc( stdProfileIdc_ )
      , fieldLayout( fieldLayout_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH264ProfileEXT( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264ProfileEXT( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264ProfileEXT( *reinterpret_cast<VideoDecodeH264ProfileEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileEXT &
                            operator=( VideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264ProfileEXT & operator=( VkVideoDecodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH264ProfileEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfileIdc = stdProfileIdc_;
      return *this;
    }

    VideoDecodeH264ProfileEXT &
      setFieldLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264FieldLayoutFlagsEXT fieldLayout_ ) VULKAN_HPP_NOEXCEPT
    {
      fieldLayout = fieldLayout_;
      return *this;
    }

    operator VkVideoDecodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264ProfileEXT *>( this );
    }

    operator VkVideoDecodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264ProfileEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264ProfileEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) &&
             ( fieldLayout == rhs.fieldLayout );
    }

    bool operator!=( VideoDecodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                      sType         = StructureType::eVideoDecodeH264ProfileEXT;
    const void *                                             pNext         = {};
    StdVideoH264ProfileIdc                                   stdProfileIdc = {};
    VULKAN_HPP_NAMESPACE::VideoDecodeH264FieldLayoutFlagsEXT fieldLayout   = {};
  };
  static_assert( sizeof( VideoDecodeH264ProfileEXT ) == sizeof( VkVideoDecodeH264ProfileEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH264ProfileEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264ProfileEXT>
  {
    using Type = VideoDecodeH264ProfileEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH264SessionCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoDecodeH264SessionCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_                = {},
      const VULKAN_HPP_NAMESPACE::ExtensionProperties *   pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pStdExtensionVersion( pStdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT( VideoDecodeH264SessionCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264SessionCreateInfoEXT( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264SessionCreateInfoEXT( *reinterpret_cast<VideoDecodeH264SessionCreateInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionCreateInfoEXT &
                            operator=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264SessionCreateInfoEXT &
      operator=( VkVideoDecodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionCreateInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH264SessionCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VideoDecodeH264SessionCreateInfoEXT & setPStdExtensionVersion(
      const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdExtensionVersion = pStdExtensionVersion_;
      return *this;
    }

    operator VkVideoDecodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264SessionCreateInfoEXT *>( this );
    }

    operator VkVideoDecodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264SessionCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264SessionCreateInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( pStdExtensionVersion == rhs.pStdExtensionVersion );
    }

    bool operator!=( VideoDecodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType = StructureType::eVideoDecodeH264SessionCreateInfoEXT;
    const void *                                        pNext = {};
    VULKAN_HPP_NAMESPACE::VideoDecodeH264CreateFlagsEXT flags = {};
    const VULKAN_HPP_NAMESPACE::ExtensionProperties *   pStdExtensionVersion = {};
  };
  static_assert( sizeof( VideoDecodeH264SessionCreateInfoEXT ) == sizeof( VkVideoDecodeH264SessionCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH264SessionCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264SessionCreateInfoEXT>
  {
    using Type = VideoDecodeH264SessionCreateInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH264SessionParametersAddInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoDecodeH264SessionParametersAddInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT(
      uint32_t                                 spsStdCount_ = {},
      const StdVideoH264SequenceParameterSet * pSpsStd_     = {},
      uint32_t                                 ppsStdCount_ = {},
      const StdVideoH264PictureParameterSet *  pPpsStd_     = {} ) VULKAN_HPP_NOEXCEPT
      : spsStdCount( spsStdCount_ )
      , pSpsStd( pSpsStd_ )
      , ppsStdCount( ppsStdCount_ )
      , pPpsStd( pPpsStd_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT(
      VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264SessionParametersAddInfoEXT( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264SessionParametersAddInfoEXT(
          *reinterpret_cast<VideoDecodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH264SessionParametersAddInfoEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const &  ppsStd_ = {} )
      : spsStdCount( static_cast<uint32_t>( spsStd_.size() ) )
      , pSpsStd( spsStd_.data() )
      , ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) )
      , pPpsStd( ppsStd_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT &
                            operator=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264SessionParametersAddInfoEXT &
      operator=( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = spsStdCount_;
      return *this;
    }

    VideoDecodeH264SessionParametersAddInfoEXT &
      setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pSpsStd = pSpsStd_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH264SessionParametersAddInfoEXT &
      setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_ )
        VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = static_cast<uint32_t>( spsStd_.size() );
      pSpsStd     = spsStd_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoDecodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = ppsStdCount_;
      return *this;
    }

    VideoDecodeH264SessionParametersAddInfoEXT &
      setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pPpsStd = pPpsStd_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH264SessionParametersAddInfoEXT &
      setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ )
        VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
      pPpsStd     = ppsStd_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkVideoDecodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264SessionParametersAddInfoEXT *>( this );
    }

    operator VkVideoDecodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264SessionParametersAddInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264SessionParametersAddInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) &&
             ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
    }

    bool operator!=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType       = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT;
    const void *                             pNext       = {};
    uint32_t                                 spsStdCount = {};
    const StdVideoH264SequenceParameterSet * pSpsStd     = {};
    uint32_t                                 ppsStdCount = {};
    const StdVideoH264PictureParameterSet *  pPpsStd     = {};
  };
  static_assert( sizeof( VideoDecodeH264SessionParametersAddInfoEXT ) ==
                   sizeof( VkVideoDecodeH264SessionParametersAddInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH264SessionParametersAddInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersAddInfoEXT>
  {
    using Type = VideoDecodeH264SessionParametersAddInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
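
  // Illustrative use of VideoDecodeH264SessionParametersAddInfoEXT (hand-written sketch, not generated
  // from the registry). In enhanced mode the ArrayProxy constructor and the setSpsStd / setPpsStd
  // overloads derive spsStdCount / ppsStdCount from the proxies, so the counts never drift out of sync
  // with the pointers; 'spsArray' and 'ppsArray' are assumed caller-provided containers:
  //   std::vector<StdVideoH264SequenceParameterSet> spsArray;  // parsed from the bitstream
  //   std::vector<StdVideoH264PictureParameterSet>  ppsArray;  // parsed from the bitstream
  //   VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT addInfo( spsArray, ppsArray );
  //   // equivalent, via the chained setters:
  //   addInfo.setSpsStd( spsArray ).setPpsStd( ppsArray );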

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH264SessionParametersCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT(
      uint32_t                                                                 maxSpsStdCount_     = {},
      uint32_t                                                                 maxPpsStdCount_     = {},
      const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {} )
      VULKAN_HPP_NOEXCEPT
      : maxSpsStdCount( maxSpsStdCount_ )
      , maxPpsStdCount( maxPpsStdCount_ )
      , pParametersAddInfo( pParametersAddInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersCreateInfoEXT(
      VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264SessionParametersCreateInfoEXT( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : VideoDecodeH264SessionParametersCreateInfoEXT(
          *reinterpret_cast<VideoDecodeH264SessionParametersCreateInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT &
                            operator=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH264SessionParametersCreateInfoEXT &
      operator=( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH264SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSpsStdCount = maxSpsStdCount_;
      return *this;
    }

    VideoDecodeH264SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPpsStdCount = maxPpsStdCount_;
      return *this;
    }

    VideoDecodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo(
      const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pParametersAddInfo = pParametersAddInfo_;
      return *this;
    }

    operator VkVideoDecodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH264SessionParametersCreateInfoEXT *>( this );
    }

    operator VkVideoDecodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH264SessionParametersCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH264SessionParametersCreateInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) &&
             ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
    }

    bool operator!=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT;
    const void *                        pNext          = {};
    uint32_t                            maxSpsStdCount = {};
    uint32_t                            maxPpsStdCount = {};
    const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {};
  };
  static_assert( sizeof( VideoDecodeH264SessionParametersCreateInfoEXT ) ==
                   sizeof( VkVideoDecodeH264SessionParametersCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH264SessionParametersCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT>
  {
    using Type = VideoDecodeH264SessionParametersCreateInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
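
  // Illustrative use of VideoDecodeH264SessionParametersCreateInfoEXT (hand-written sketch, not generated
  // from the registry), pairing the capacity limits with an optional initial add-info; 'addInfo' refers
  // to a VideoDecodeH264SessionParametersAddInfoEXT built as in the sketch above, and the count values
  // are arbitrary example capacities:
  //   auto paramsCreateInfo = VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT()
  //                             .setMaxSpsStdCount( 32 )
  //                             .setMaxPpsStdCount( 256 )
  //                             .setPParametersAddInfo( &addInfo );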

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265CapabilitiesEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT(
      uint32_t maxLevel_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
      : maxLevel( maxLevel_ )
      , stdExtensionVersion( stdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      VideoDecodeH265CapabilitiesEXT( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265CapabilitiesEXT( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265CapabilitiesEXT( *reinterpret_cast<VideoDecodeH265CapabilitiesEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265CapabilitiesEXT &
                            operator=( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265CapabilitiesEXT & operator=( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT const *>( &rhs );
      return *this;
    }

    operator VkVideoDecodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265CapabilitiesEXT *>( this );
    }

    operator VkVideoDecodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265CapabilitiesEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265CapabilitiesEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxLevel == rhs.maxLevel ) &&
             ( stdExtensionVersion == rhs.stdExtensionVersion );
    }

    bool operator!=( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType       sType               = StructureType::eVideoDecodeH265CapabilitiesEXT;
    void *                                    pNext               = {};
    uint32_t                                  maxLevel            = {};
    VULKAN_HPP_NAMESPACE::ExtensionProperties stdExtensionVersion = {};
  };
  static_assert( sizeof( VideoDecodeH265CapabilitiesEXT ) == sizeof( VkVideoDecodeH265CapabilitiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH265CapabilitiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265CapabilitiesEXT>
  {
    using Type = VideoDecodeH265CapabilitiesEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
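
  // VideoDecodeH265CapabilitiesEXT is a returned structure (note the non-const pNext), so it is normally
  // default-constructed and chained into the pNext chain of the capabilities structure handed to the
  // application's video capabilities query, rather than filled by hand. A hand-written sketch, with the
  // query step left to the application:
  //   VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT h265Caps;
  //   // ... chain &h265Caps into the capabilities query, then read
  //   // ... h265Caps.maxLevel and h265Caps.stdExtensionVersion.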

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265DpbSlotInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT(
      const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : pStdReferenceInfo( pStdReferenceInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH265DpbSlotInfoEXT( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265DpbSlotInfoEXT( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265DpbSlotInfoEXT( *reinterpret_cast<VideoDecodeH265DpbSlotInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT &
                            operator=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265DpbSlotInfoEXT & operator=( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH265DpbSlotInfoEXT &
      setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdReferenceInfo = pStdReferenceInfo_;
      return *this;
    }

    operator VkVideoDecodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265DpbSlotInfoEXT *>( this );
    }

    operator VkVideoDecodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265DpbSlotInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265DpbSlotInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo );
    }

    bool operator!=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType     sType             = StructureType::eVideoDecodeH265DpbSlotInfoEXT;
    const void *                            pNext             = {};
    const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {};
  };
  static_assert( sizeof( VideoDecodeH265DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH265DpbSlotInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH265DpbSlotInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265DpbSlotInfoEXT>
  {
    using Type = VideoDecodeH265DpbSlotInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
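
  // Illustrative use of VideoDecodeH265DpbSlotInfoEXT (hand-written sketch, not generated from the
  // registry); the struct carries the codec-standard reference-picture info for one DPB slot and is
  // chained by the application into the pNext chain of the matching reference slot:
  //   StdVideoDecodeH265ReferenceInfo stdRefInfo = {};  // filled by the application
  //   auto dpbSlotInfo =
  //     VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT().setPStdReferenceInfo( &stdRefInfo );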

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265PictureInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {},
                                                        uint32_t                        slicesCount_     = {},
                                                        const uint32_t * pSlicesDataOffsets_ = {} ) VULKAN_HPP_NOEXCEPT
      : pStdPictureInfo( pStdPictureInfo_ )
      , slicesCount( slicesCount_ )
      , pSlicesDataOffsets( pSlicesDataOffsets_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH265PictureInfoEXT( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265PictureInfoEXT( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265PictureInfoEXT( *reinterpret_cast<VideoDecodeH265PictureInfoEXT const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH265PictureInfoEXT(
      StdVideoDecodeH265PictureInfo *                                       pStdPictureInfo_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ )
      : pStdPictureInfo( pStdPictureInfo_ )
      , slicesCount( static_cast<uint32_t>( slicesDataOffsets_.size() ) )
      , pSlicesDataOffsets( slicesDataOffsets_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT &
                            operator=( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265PictureInfoEXT & operator=( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH265PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH265PictureInfoEXT &
      setPStdPictureInfo( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }

    VideoDecodeH265PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT
    {
      slicesCount = slicesCount_;
      return *this;
    }

    VideoDecodeH265PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pSlicesDataOffsets = pSlicesDataOffsets_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH265PictureInfoEXT & setSlicesDataOffsets(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      slicesCount        = static_cast<uint32_t>( slicesDataOffsets_.size() );
      pSlicesDataOffsets = slicesDataOffsets_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkVideoDecodeH265PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265PictureInfoEXT *>( this );
    }

    operator VkVideoDecodeH265PictureInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265PictureInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265PictureInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) &&
             ( slicesCount == rhs.slicesCount ) && ( pSlicesDataOffsets == rhs.pSlicesDataOffsets );
    }

    bool operator!=( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType              = StructureType::eVideoDecodeH265PictureInfoEXT;
    const void *                        pNext              = {};
    StdVideoDecodeH265PictureInfo *     pStdPictureInfo    = {};
    uint32_t                            slicesCount        = {};
    const uint32_t *                    pSlicesDataOffsets = {};
  };
  static_assert( sizeof( VideoDecodeH265PictureInfoEXT ) == sizeof( VkVideoDecodeH265PictureInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH265PictureInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265PictureInfoEXT>
  {
    using Type = VideoDecodeH265PictureInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
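
  // Illustrative use of VideoDecodeH265PictureInfoEXT (hand-written sketch, not generated from the
  // registry). In enhanced mode setSlicesDataOffsets() derives slicesCount from the proxy, so only the
  // offsets container needs to be maintained; 'stdPictureInfo' and 'sliceOffsets' are assumed
  // caller-provided:
  //   StdVideoDecodeH265PictureInfo stdPictureInfo = {};
  //   std::vector<uint32_t>         sliceOffsets   = { 0 };  // byte offsets of the slices in the bitstream buffer
  //   auto pictureInfo = VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT()
  //                        .setPStdPictureInfo( &stdPictureInfo )
  //                        .setSlicesDataOffsets( sliceOffsets );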

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265ProfileEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT
      : stdProfileIdc( stdProfileIdc_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoDecodeH265ProfileEXT( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265ProfileEXT( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265ProfileEXT( *reinterpret_cast<VideoDecodeH265ProfileEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileEXT &
                            operator=( VideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265ProfileEXT & operator=( VkVideoDecodeH265ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH265ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH265ProfileEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfileIdc = stdProfileIdc_;
      return *this;
    }

    operator VkVideoDecodeH265ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265ProfileEXT *>( this );
    }

    operator VkVideoDecodeH265ProfileEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265ProfileEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265ProfileEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 );
    }

    bool operator!=( VideoDecodeH265ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eVideoDecodeH265ProfileEXT;
    const void *                        pNext         = {};
    StdVideoH265ProfileIdc              stdProfileIdc = {};
  };
  static_assert( sizeof( VideoDecodeH265ProfileEXT ) == sizeof( VkVideoDecodeH265ProfileEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH265ProfileEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265ProfileEXT>
  {
    using Type = VideoDecodeH265ProfileEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
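
  // Illustrative use of VideoDecodeH265ProfileEXT (hand-written sketch, not generated from the registry).
  // stdProfileIdc is an external codec-standard type, which is also why operator== above falls back to a
  // memcmp for that member; the profile value itself comes from the H.265 std header and is assumed here:
  //   StdVideoH265ProfileIdc profileIdc = {};  // e.g. the Main profile value from the H.265 std header
  //   auto h265Profile = VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileEXT().setStdProfileIdc( profileIdc );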

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265SessionCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoDecodeH265SessionCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_                = {},
      const VULKAN_HPP_NAMESPACE::ExtensionProperties *   pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , pStdExtensionVersion( pStdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT( VideoDecodeH265SessionCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionCreateInfoEXT( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265SessionCreateInfoEXT( *reinterpret_cast<VideoDecodeH265SessionCreateInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionCreateInfoEXT &
                            operator=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionCreateInfoEXT &
      operator=( VkVideoDecodeH265SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionCreateInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH265SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH265SessionCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VideoDecodeH265SessionCreateInfoEXT & setPStdExtensionVersion(
      const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdExtensionVersion = pStdExtensionVersion_;
      return *this;
    }

    operator VkVideoDecodeH265SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265SessionCreateInfoEXT *>( this );
    }

    operator VkVideoDecodeH265SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265SessionCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265SessionCreateInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( pStdExtensionVersion == rhs.pStdExtensionVersion );
    }

    bool operator!=( VideoDecodeH265SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType = StructureType::eVideoDecodeH265SessionCreateInfoEXT;
    const void *                                        pNext = {};
    VULKAN_HPP_NAMESPACE::VideoDecodeH265CreateFlagsEXT flags = {};
    const VULKAN_HPP_NAMESPACE::ExtensionProperties *   pStdExtensionVersion = {};
  };
  static_assert( sizeof( VideoDecodeH265SessionCreateInfoEXT ) == sizeof( VkVideoDecodeH265SessionCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH265SessionCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionCreateInfoEXT>
  {
    using Type = VideoDecodeH265SessionCreateInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265SessionParametersAddInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoDecodeH265SessionParametersAddInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT(
      uint32_t                                 spsStdCount_ = {},
      const StdVideoH265SequenceParameterSet * pSpsStd_     = {},
      uint32_t                                 ppsStdCount_ = {},
      const StdVideoH265PictureParameterSet *  pPpsStd_     = {} ) VULKAN_HPP_NOEXCEPT
      : spsStdCount( spsStdCount_ )
      , pSpsStd( pSpsStd_ )
      , ppsStdCount( ppsStdCount_ )
      , pPpsStd( pPpsStd_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT(
      VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionParametersAddInfoEXT( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265SessionParametersAddInfoEXT(
          *reinterpret_cast<VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH265SessionParametersAddInfoEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const &  ppsStd_ = {} )
      : spsStdCount( static_cast<uint32_t>( spsStd_.size() ) )
      , pSpsStd( spsStd_.data() )
      , ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) )
      , pPpsStd( ppsStd_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT &
                            operator=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionParametersAddInfoEXT &
      operator=( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH265SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = spsStdCount_;
      return *this;
    }

    VideoDecodeH265SessionParametersAddInfoEXT &
      setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pSpsStd = pSpsStd_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH265SessionParametersAddInfoEXT &
      setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265SequenceParameterSet> const & spsStd_ )
        VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = static_cast<uint32_t>( spsStd_.size() );
      pSpsStd     = spsStd_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoDecodeH265SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = ppsStdCount_;
      return *this;
    }

    VideoDecodeH265SessionParametersAddInfoEXT &
      setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pPpsStd = pPpsStd_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoDecodeH265SessionParametersAddInfoEXT &
      setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH265PictureParameterSet> const & ppsStd_ )
        VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
      pPpsStd     = ppsStd_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkVideoDecodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersAddInfoEXT *>( this );
    }

    operator VkVideoDecodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265SessionParametersAddInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265SessionParametersAddInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) &&
             ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
    }

    bool operator!=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType       = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT;
    const void *                             pNext       = {};
    uint32_t                                 spsStdCount = {};
    const StdVideoH265SequenceParameterSet * pSpsStd     = {};
    uint32_t                                 ppsStdCount = {};
    const StdVideoH265PictureParameterSet *  pPpsStd     = {};
  };
  static_assert( sizeof( VideoDecodeH265SessionParametersAddInfoEXT ) ==
                   sizeof( VkVideoDecodeH265SessionParametersAddInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH265SessionParametersAddInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersAddInfoEXT>
  {
    using Type = VideoDecodeH265SessionParametersAddInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoDecodeH265SessionParametersCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT(
      uint32_t                                                                 maxSpsStdCount_     = {},
      uint32_t                                                                 maxPpsStdCount_     = {},
      const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {} )
      VULKAN_HPP_NOEXCEPT
      : maxSpsStdCount( maxSpsStdCount_ )
      , maxPpsStdCount( maxPpsStdCount_ )
      , pParametersAddInfo( pParametersAddInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersCreateInfoEXT(
      VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionParametersCreateInfoEXT( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : VideoDecodeH265SessionParametersCreateInfoEXT(
          *reinterpret_cast<VideoDecodeH265SessionParametersCreateInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT &
                            operator=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoDecodeH265SessionParametersCreateInfoEXT &
      operator=( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT const *>( &rhs );
      return *this;
    }

    VideoDecodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoDecodeH265SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSpsStdCount = maxSpsStdCount_;
      return *this;
    }

    VideoDecodeH265SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPpsStdCount = maxPpsStdCount_;
      return *this;
    }

    VideoDecodeH265SessionParametersCreateInfoEXT & setPParametersAddInfo(
      const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pParametersAddInfo = pParametersAddInfo_;
      return *this;
    }

    operator VkVideoDecodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoDecodeH265SessionParametersCreateInfoEXT *>( this );
    }

    operator VkVideoDecodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoDecodeH265SessionParametersCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoDecodeH265SessionParametersCreateInfoEXT const & ) const = default;
#  else
    bool operator==( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) &&
             ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
    }

    bool operator!=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT;
    const void *                        pNext          = {};
    uint32_t                            maxSpsStdCount = {};
    uint32_t                            maxPpsStdCount = {};
    const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {};
  };
  static_assert( sizeof( VideoDecodeH265SessionParametersCreateInfoEXT ) ==
                   sizeof( VkVideoDecodeH265SessionParametersCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoDecodeH265SessionParametersCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT>
  {
    using Type = VideoDecodeH265SessionParametersCreateInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264CapabilitiesEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT(
      VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesFlagsEXT flags_                   = {},
      VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT    inputModeFlags_          = {},
      VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT   outputModeFlags_         = {},
      VULKAN_HPP_NAMESPACE::Extent2D                            minPictureSizeInMbs_     = {},
      VULKAN_HPP_NAMESPACE::Extent2D                            maxPictureSizeInMbs_     = {},
      VULKAN_HPP_NAMESPACE::Extent2D                            inputImageDataAlignment_ = {},
      uint8_t                                                   maxNumL0ReferenceForP_   = {},
      uint8_t                                                   maxNumL0ReferenceForB_   = {},
      uint8_t                                                   maxNumL1Reference_       = {},
      uint8_t                                                   qualityLevelCount_       = {},
      VULKAN_HPP_NAMESPACE::ExtensionProperties                 stdExtensionVersion_     = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , inputModeFlags( inputModeFlags_ )
      , outputModeFlags( outputModeFlags_ )
      , minPictureSizeInMbs( minPictureSizeInMbs_ )
      , maxPictureSizeInMbs( maxPictureSizeInMbs_ )
      , inputImageDataAlignment( inputImageDataAlignment_ )
      , maxNumL0ReferenceForP( maxNumL0ReferenceForP_ )
      , maxNumL0ReferenceForB( maxNumL0ReferenceForB_ )
      , maxNumL1Reference( maxNumL1Reference_ )
      , qualityLevelCount( qualityLevelCount_ )
      , stdExtensionVersion( stdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14
      VideoEncodeH264CapabilitiesEXT( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264CapabilitiesEXT( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264CapabilitiesEXT( *reinterpret_cast<VideoEncodeH264CapabilitiesEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264CapabilitiesEXT &
                            operator=( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264CapabilitiesEXT & operator=( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const *>( &rhs );
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT &
      setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT &
      setInputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      inputModeFlags = inputModeFlags_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT &
      setOutputModeFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ ) VULKAN_HPP_NOEXCEPT
    {
      outputModeFlags = outputModeFlags_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT &
      setMinPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & minPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
    {
      minPictureSizeInMbs = minPictureSizeInMbs_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT &
      setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPictureSizeInMbs = maxPictureSizeInMbs_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT &
      setInputImageDataAlignment( VULKAN_HPP_NAMESPACE::Extent2D const & inputImageDataAlignment_ ) VULKAN_HPP_NOEXCEPT
    {
      inputImageDataAlignment = inputImageDataAlignment_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT & setMaxNumL0ReferenceForP( uint8_t maxNumL0ReferenceForP_ ) VULKAN_HPP_NOEXCEPT
    {
      maxNumL0ReferenceForP = maxNumL0ReferenceForP_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT & setMaxNumL0ReferenceForB( uint8_t maxNumL0ReferenceForB_ ) VULKAN_HPP_NOEXCEPT
    {
      maxNumL0ReferenceForB = maxNumL0ReferenceForB_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT & setMaxNumL1Reference( uint8_t maxNumL1Reference_ ) VULKAN_HPP_NOEXCEPT
    {
      maxNumL1Reference = maxNumL1Reference_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT & setQualityLevelCount( uint8_t qualityLevelCount_ ) VULKAN_HPP_NOEXCEPT
    {
      qualityLevelCount = qualityLevelCount_;
      return *this;
    }

    VideoEncodeH264CapabilitiesEXT & setStdExtensionVersion(
      VULKAN_HPP_NAMESPACE::ExtensionProperties const & stdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      stdExtensionVersion = stdExtensionVersion_;
      return *this;
    }

    operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264CapabilitiesEXT *>( this );
    }

    operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264CapabilitiesEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264CapabilitiesEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( inputModeFlags == rhs.inputModeFlags ) && ( outputModeFlags == rhs.outputModeFlags ) &&
             ( minPictureSizeInMbs == rhs.minPictureSizeInMbs ) && ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) &&
             ( inputImageDataAlignment == rhs.inputImageDataAlignment ) &&
             ( maxNumL0ReferenceForP == rhs.maxNumL0ReferenceForP ) &&
             ( maxNumL0ReferenceForB == rhs.maxNumL0ReferenceForB ) && ( maxNumL1Reference == rhs.maxNumL1Reference ) &&
             ( qualityLevelCount == rhs.qualityLevelCount ) && ( stdExtensionVersion == rhs.stdExtensionVersion );
    }

    bool operator!=( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                       sType = StructureType::eVideoEncodeH264CapabilitiesEXT;
    const void *                                              pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT    inputModeFlags          = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT   outputModeFlags         = {};
    VULKAN_HPP_NAMESPACE::Extent2D                            minPictureSizeInMbs     = {};
    VULKAN_HPP_NAMESPACE::Extent2D                            maxPictureSizeInMbs     = {};
    VULKAN_HPP_NAMESPACE::Extent2D                            inputImageDataAlignment = {};
    uint8_t                                                   maxNumL0ReferenceForP   = {};
    uint8_t                                                   maxNumL0ReferenceForB   = {};
    uint8_t                                                   maxNumL1Reference       = {};
    uint8_t                                                   qualityLevelCount       = {};
    VULKAN_HPP_NAMESPACE::ExtensionProperties                 stdExtensionVersion     = {};
  };
  static_assert( sizeof( VideoEncodeH264CapabilitiesEXT ) == sizeof( VkVideoEncodeH264CapabilitiesEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeH264CapabilitiesEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264CapabilitiesEXT>
  {
    using Type = VideoEncodeH264CapabilitiesEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
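
  // VideoEncodeH264CapabilitiesEXT mirrors VkVideoEncodeH264CapabilitiesEXT bit for bit (see the
  // static_asserts above), so it can be handed to C code that expects the C struct. A hand-written
  // interop sketch; how the structure gets filled is left to the application:
  //   VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT encodeCaps;
  //   VkVideoEncodeH264CapabilitiesEXT & cCaps = encodeCaps;  // via the conversion operator above
  //   // hand &cCaps to C code that fills it, then read the wrapper members directly:
  //   uint8_t maxL1Refs = encodeCaps.maxNumL1Reference;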

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264DpbSlotInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264DpbSlotInfoEXT( int8_t                                slotIndex_       = {},
                                     const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : slotIndex( slotIndex_ )
      , pStdPictureInfo( pStdPictureInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264DpbSlotInfoEXT( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264DpbSlotInfoEXT( *reinterpret_cast<VideoEncodeH264DpbSlotInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT &
                            operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264DpbSlotInfoEXT & operator=( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const *>( &rhs );
      return *this;
    }

    VideoEncodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeH264DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      slotIndex = slotIndex_;
      return *this;
    }

    VideoEncodeH264DpbSlotInfoEXT &
      setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdPictureInfo = pStdPictureInfo_;
      return *this;
    }

    operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264DpbSlotInfoEXT *>( this );
    }

    operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264DpbSlotInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264DpbSlotInfoEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) &&
             ( pStdPictureInfo == rhs.pStdPictureInfo );
    }

    bool operator!=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType   sType           = StructureType::eVideoEncodeH264DpbSlotInfoEXT;
    const void *                          pNext           = {};
    int8_t                                slotIndex       = {};
    const StdVideoEncodeH264PictureInfo * pStdPictureInfo = {};
  };
  static_assert( sizeof( VideoEncodeH264DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH264DpbSlotInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeH264DpbSlotInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264DpbSlotInfoEXT>
  {
    using Type = VideoEncodeH264DpbSlotInfoEXT;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264EmitPictureParametersEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoEncodeH264EmitPictureParametersEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264EmitPictureParametersEXT( uint8_t                      spsId_           = {},
                                               VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_   = {},
                                               uint32_t                     ppsIdEntryCount_ = {},
                                               const uint8_t *              ppsIdEntries_    = {} ) VULKAN_HPP_NOEXCEPT
      : spsId( spsId_ )
      , emitSpsEnable( emitSpsEnable_ )
      , ppsIdEntryCount( ppsIdEntryCount_ )
      , ppsIdEntries( ppsIdEntries_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersEXT( VideoEncodeH264EmitPictureParametersEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264EmitPictureParametersEXT( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264EmitPictureParametersEXT(
          *reinterpret_cast<VideoEncodeH264EmitPictureParametersEXT const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264EmitPictureParametersEXT(
      uint8_t                                                              spsId_,
      VULKAN_HPP_NAMESPACE::Bool32                                         emitSpsEnable_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ )
      : spsId( spsId_ )
      , emitSpsEnable( emitSpsEnable_ )
      , ppsIdEntryCount( static_cast<uint32_t>( psIdEntries_.size() ) )
      , ppsIdEntries( psIdEntries_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersEXT &
                            operator=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264EmitPictureParametersEXT &
      operator=( VkVideoEncodeH264EmitPictureParametersEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersEXT const *>( &rhs );
      return *this;
    }

    VideoEncodeH264EmitPictureParametersEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeH264EmitPictureParametersEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT
    {
      spsId = spsId_;
      return *this;
    }

    VideoEncodeH264EmitPictureParametersEXT &
      setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      emitSpsEnable = emitSpsEnable_;
      return *this;
    }

    VideoEncodeH264EmitPictureParametersEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsIdEntryCount = ppsIdEntryCount_;
      return *this;
    }

    VideoEncodeH264EmitPictureParametersEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsIdEntries = ppsIdEntries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264EmitPictureParametersEXT & setPsIdEntries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint8_t> const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsIdEntryCount = static_cast<uint32_t>( psIdEntries_.size() );
      ppsIdEntries    = psIdEntries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkVideoEncodeH264EmitPictureParametersEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264EmitPictureParametersEXT *>( this );
    }

    operator VkVideoEncodeH264EmitPictureParametersEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264EmitPictureParametersEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264EmitPictureParametersEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsId == rhs.spsId ) &&
             ( emitSpsEnable == rhs.emitSpsEnable ) && ( ppsIdEntryCount == rhs.ppsIdEntryCount ) &&
             ( ppsIdEntries == rhs.ppsIdEntries );
    }

    bool operator!=( VideoEncodeH264EmitPictureParametersEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType           = StructureType::eVideoEncodeH264EmitPictureParametersEXT;
    const void *                        pNext           = {};
    uint8_t                             spsId           = {};
    VULKAN_HPP_NAMESPACE::Bool32        emitSpsEnable   = {};
    uint32_t                            ppsIdEntryCount = {};
    const uint8_t *                     ppsIdEntries    = {};
  };
  static_assert( sizeof( VideoEncodeH264EmitPictureParametersEXT ) ==
                   sizeof( VkVideoEncodeH264EmitPictureParametersEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeH264EmitPictureParametersEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264EmitPictureParametersEXT>
  {
    using Type = VideoEncodeH264EmitPictureParametersEXT;
  };
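
  // Usage sketch (hand-written, not generated): a minimal way to fill this struct through its
  // fluent setters, assuming the default vk namespace and a hypothetical caller-owned ppsIds array.
  // The ArrayProxy overload setPsIdEntries() (enhanced mode only) sets ppsIdEntryCount and
  // ppsIdEntries together so the two always stay consistent.
  //
  //   std::array<uint8_t, 2> ppsIds = { { 0, 1 } };
  //   vk::VideoEncodeH264EmitPictureParametersEXT emitParams =
  //     vk::VideoEncodeH264EmitPictureParametersEXT()
  //       .setSpsId( 0 )
  //       .setEmitSpsEnable( VK_TRUE )
  //       .setPsIdEntries( ppsIds );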
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264NaluSliceEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceEXT(
      const StdVideoEncodeH264SliceHeader *                       pSliceHeaderStd_         = {},
      uint32_t                                                    mbCount_                 = {},
      uint8_t                                                     refFinalList0EntryCount_ = {},
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_   = {},
      uint8_t                                                     refFinalList1EntryCount_ = {},
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_   = {},
      uint32_t                                                    precedingNaluBytes_      = {},
      uint8_t                                                     minQp_                   = {},
      uint8_t                                                     maxQp_                   = {} ) VULKAN_HPP_NOEXCEPT
      : pSliceHeaderStd( pSliceHeaderStd_ )
      , mbCount( mbCount_ )
      , refFinalList0EntryCount( refFinalList0EntryCount_ )
      , pRefFinalList0Entries( pRefFinalList0Entries_ )
      , refFinalList1EntryCount( refFinalList1EntryCount_ )
      , pRefFinalList1Entries( pRefFinalList1Entries_ )
      , precedingNaluBytes( precedingNaluBytes_ )
      , minQp( minQp_ )
      , maxQp( maxQp_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264NaluSliceEXT( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264NaluSliceEXT( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264NaluSliceEXT( *reinterpret_cast<VideoEncodeH264NaluSliceEXT const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264NaluSliceEXT(
      const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_,
      uint32_t                              mbCount_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
        refFinalList0Entries_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
               refFinalList1Entries_ = {},
      uint32_t precedingNaluBytes_   = {},
      uint8_t  minQp_                = {},
      uint8_t  maxQp_                = {} )
      : pSliceHeaderStd( pSliceHeaderStd_ )
      , mbCount( mbCount_ )
      , refFinalList0EntryCount( static_cast<uint8_t>( refFinalList0Entries_.size() ) )
      , pRefFinalList0Entries( refFinalList0Entries_.data() )
      , refFinalList1EntryCount( static_cast<uint8_t>( refFinalList1Entries_.size() ) )
      , pRefFinalList1Entries( refFinalList1Entries_.data() )
      , precedingNaluBytes( precedingNaluBytes_ )
      , minQp( minQp_ )
      , maxQp( maxQp_ )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceEXT &
                            operator=( VideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264NaluSliceEXT & operator=( VkVideoEncodeH264NaluSliceEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT const *>( &rhs );
      return *this;
    }

    VideoEncodeH264NaluSliceEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeH264NaluSliceEXT &
      setPSliceHeaderStd( const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pSliceHeaderStd = pSliceHeaderStd_;
      return *this;
    }

    VideoEncodeH264NaluSliceEXT & setMbCount( uint32_t mbCount_ ) VULKAN_HPP_NOEXCEPT
    {
      mbCount = mbCount_;
      return *this;
    }

    VideoEncodeH264NaluSliceEXT & setRefFinalList0EntryCount( uint8_t refFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      refFinalList0EntryCount = refFinalList0EntryCount_;
      return *this;
    }

    VideoEncodeH264NaluSliceEXT & setPRefFinalList0Entries(
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      pRefFinalList0Entries = pRefFinalList0Entries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264NaluSliceEXT & setRefFinalList0Entries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
        refFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      refFinalList0EntryCount = static_cast<uint8_t>( refFinalList0Entries_.size() );
      pRefFinalList0Entries   = refFinalList0Entries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH264NaluSliceEXT & setRefFinalList1EntryCount( uint8_t refFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      refFinalList1EntryCount = refFinalList1EntryCount_;
      return *this;
    }

    VideoEncodeH264NaluSliceEXT & setPRefFinalList1Entries(
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      pRefFinalList1Entries = pRefFinalList1Entries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264NaluSliceEXT & setRefFinalList1Entries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
        refFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      refFinalList1EntryCount = static_cast<uint8_t>( refFinalList1Entries_.size() );
      pRefFinalList1Entries   = refFinalList1Entries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH264NaluSliceEXT & setPrecedingNaluBytes( uint32_t precedingNaluBytes_ ) VULKAN_HPP_NOEXCEPT
    {
      precedingNaluBytes = precedingNaluBytes_;
      return *this;
    }

    VideoEncodeH264NaluSliceEXT & setMinQp( uint8_t minQp_ ) VULKAN_HPP_NOEXCEPT
    {
      minQp = minQp_;
      return *this;
    }

    VideoEncodeH264NaluSliceEXT & setMaxQp( uint8_t maxQp_ ) VULKAN_HPP_NOEXCEPT
    {
      maxQp = maxQp_;
      return *this;
    }

    operator VkVideoEncodeH264NaluSliceEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264NaluSliceEXT *>( this );
    }

    operator VkVideoEncodeH264NaluSliceEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264NaluSliceEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264NaluSliceEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSliceHeaderStd == rhs.pSliceHeaderStd ) &&
             ( mbCount == rhs.mbCount ) && ( refFinalList0EntryCount == rhs.refFinalList0EntryCount ) &&
             ( pRefFinalList0Entries == rhs.pRefFinalList0Entries ) &&
             ( refFinalList1EntryCount == rhs.refFinalList1EntryCount ) &&
             ( pRefFinalList1Entries == rhs.pRefFinalList1Entries ) &&
             ( precedingNaluBytes == rhs.precedingNaluBytes ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp );
    }

    bool operator!=( VideoEncodeH264NaluSliceEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                         sType = StructureType::eVideoEncodeH264NaluSliceEXT;
    const void *                                                pNext = {};
    const StdVideoEncodeH264SliceHeader *                       pSliceHeaderStd         = {};
    uint32_t                                                    mbCount                 = {};
    uint8_t                                                     refFinalList0EntryCount = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList0Entries   = {};
    uint8_t                                                     refFinalList1EntryCount = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefFinalList1Entries   = {};
    uint32_t                                                    precedingNaluBytes      = {};
    uint8_t                                                     minQp                   = {};
    uint8_t                                                     maxQp                   = {};
  };
  static_assert( sizeof( VideoEncodeH264NaluSliceEXT ) == sizeof( VkVideoEncodeH264NaluSliceEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeH264NaluSliceEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264NaluSliceEXT>
  {
    using Type = VideoEncodeH264NaluSliceEXT;
  };
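
  // Usage sketch (hand-written, not generated): building one slice entry, assuming the default vk
  // namespace; sliceHeader and dpbSlots are hypothetical caller-owned objects that would be filled
  // with real codec-std data. The enhanced-mode ArrayProxy setter keeps refFinalList0EntryCount and
  // pRefFinalList0Entries in sync.
  //
  //   StdVideoEncodeH264SliceHeader                    sliceHeader = {};  // fill with codec-std slice header values
  //   std::array<vk::VideoEncodeH264DpbSlotInfoEXT, 1> dpbSlots    = {};  // fill with the L0 reference slots
  //   vk::VideoEncodeH264NaluSliceEXT slice = vk::VideoEncodeH264NaluSliceEXT()
  //                                             .setPSliceHeaderStd( &sliceHeader )
  //                                             .setMbCount( 120 )
  //                                             .setRefFinalList0Entries( dpbSlots )
  //                                             .setMinQp( 20 )
  //                                             .setMaxQp( 40 );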
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264ProfileEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileEXT( StdVideoH264ProfileIdc stdProfileIdc_ = {} ) VULKAN_HPP_NOEXCEPT
      : stdProfileIdc( stdProfileIdc_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264ProfileEXT( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264ProfileEXT( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264ProfileEXT( *reinterpret_cast<VideoEncodeH264ProfileEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileEXT &
                            operator=( VideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264ProfileEXT & operator=( VkVideoEncodeH264ProfileEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileEXT const *>( &rhs );
      return *this;
    }

    VideoEncodeH264ProfileEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeH264ProfileEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT
    {
      stdProfileIdc = stdProfileIdc_;
      return *this;
    }

    operator VkVideoEncodeH264ProfileEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264ProfileEXT *>( this );
    }

    operator VkVideoEncodeH264ProfileEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264ProfileEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264ProfileEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 );
    }

    bool operator!=( VideoEncodeH264ProfileEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType         = StructureType::eVideoEncodeH264ProfileEXT;
    const void *                        pNext         = {};
    StdVideoH264ProfileIdc              stdProfileIdc = {};
  };
  static_assert( sizeof( VideoEncodeH264ProfileEXT ) == sizeof( VkVideoEncodeH264ProfileEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeH264ProfileEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264ProfileEXT>
  {
    using Type = VideoEncodeH264ProfileEXT;
  };
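
  // Usage sketch (hand-written, not generated), assuming the default vk namespace. The struct only
  // carries the codec-standard profile IDC; the enumerator named below is assumed to come from the
  // provisional vulkan_video_codec_h264std.h header, and the struct is typically chained into the
  // pNext of a codec-independent video profile structure.
  //
  //   StdVideoH264ProfileIdc profileIdc = {};  // e.g. STD_VIDEO_H264_PROFILE_IDC_HIGH
  //   vk::VideoEncodeH264ProfileEXT h264Profile = vk::VideoEncodeH264ProfileEXT().setStdProfileIdc( profileIdc );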
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264SessionCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoEncodeH264SessionCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT(
      VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_                = {},
      VULKAN_HPP_NAMESPACE::Extent2D                      maxPictureSizeInMbs_  = {},
      const VULKAN_HPP_NAMESPACE::ExtensionProperties *   pStdExtensionVersion_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , maxPictureSizeInMbs( maxPictureSizeInMbs_ )
      , pStdExtensionVersion( pStdExtensionVersion_ )
    {}

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT( VideoEncodeH264SessionCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264SessionCreateInfoEXT( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264SessionCreateInfoEXT( *reinterpret_cast<VideoEncodeH264SessionCreateInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoEXT &
                            operator=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264SessionCreateInfoEXT &
      operator=( VkVideoEncodeH264SessionCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT const *>( &rhs );
      return *this;
    }

    VideoEncodeH264SessionCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeH264SessionCreateInfoEXT &
      setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VideoEncodeH264SessionCreateInfoEXT &
      setMaxPictureSizeInMbs( VULKAN_HPP_NAMESPACE::Extent2D const & maxPictureSizeInMbs_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPictureSizeInMbs = maxPictureSizeInMbs_;
      return *this;
    }

    VideoEncodeH264SessionCreateInfoEXT & setPStdExtensionVersion(
      const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdExtensionVersion_ ) VULKAN_HPP_NOEXCEPT
    {
      pStdExtensionVersion = pStdExtensionVersion_;
      return *this;
    }

    operator VkVideoEncodeH264SessionCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264SessionCreateInfoEXT *>( this );
    }

    operator VkVideoEncodeH264SessionCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264SessionCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264SessionCreateInfoEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( maxPictureSizeInMbs == rhs.maxPictureSizeInMbs ) && ( pStdExtensionVersion == rhs.pStdExtensionVersion );
    }

    bool operator!=( VideoEncodeH264SessionCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                 sType = StructureType::eVideoEncodeH264SessionCreateInfoEXT;
    const void *                                        pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeH264CreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::Extent2D                      maxPictureSizeInMbs  = {};
    const VULKAN_HPP_NAMESPACE::ExtensionProperties *   pStdExtensionVersion = {};
  };
  static_assert( sizeof( VideoEncodeH264SessionCreateInfoEXT ) == sizeof( VkVideoEncodeH264SessionCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeH264SessionCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionCreateInfoEXT>
  {
    using Type = VideoEncodeH264SessionCreateInfoEXT;
  };
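
  // Usage sketch (hand-written, not generated), assuming the default vk namespace; the
  // stdExtensionVersion object is a hypothetical vk::ExtensionProperties naming the codec-std
  // extension the application was built against. Note that maxPictureSizeInMbs is expressed in
  // macroblocks, not pixels.
  //
  //   vk::ExtensionProperties stdExtensionVersion = {};  // fill extensionName / specVersion
  //   vk::VideoEncodeH264SessionCreateInfoEXT h264SessionCreateInfo =
  //     vk::VideoEncodeH264SessionCreateInfoEXT()
  //       .setMaxPictureSizeInMbs( { 1920 / 16, 1088 / 16 } )
  //       .setPStdExtensionVersion( &stdExtensionVersion );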
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264SessionParametersAddInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(
      uint32_t                                 spsStdCount_ = {},
      const StdVideoH264SequenceParameterSet * pSpsStd_     = {},
      uint32_t                                 ppsStdCount_ = {},
      const StdVideoH264PictureParameterSet *  pPpsStd_     = {} ) VULKAN_HPP_NOEXCEPT
      : spsStdCount( spsStdCount_ )
      , pSpsStd( pSpsStd_ )
      , ppsStdCount( ppsStdCount_ )
      , pPpsStd( pPpsStd_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT(
      VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264SessionParametersAddInfoEXT( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264SessionParametersAddInfoEXT(
          *reinterpret_cast<VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264SessionParametersAddInfoEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const &  ppsStd_ = {} )
      : spsStdCount( static_cast<uint32_t>( spsStd_.size() ) )
      , pSpsStd( spsStd_.data() )
      , ppsStdCount( static_cast<uint32_t>( ppsStd_.size() ) )
      , pPpsStd( ppsStd_.data() )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT &
                            operator=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264SessionParametersAddInfoEXT &
      operator=( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const *>( &rhs );
      return *this;
    }

    VideoEncodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = spsStdCount_;
      return *this;
    }

    VideoEncodeH264SessionParametersAddInfoEXT &
      setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pSpsStd = pSpsStd_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264SessionParametersAddInfoEXT &
      setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264SequenceParameterSet> const & spsStd_ )
        VULKAN_HPP_NOEXCEPT
    {
      spsStdCount = static_cast<uint32_t>( spsStd_.size() );
      pSpsStd     = spsStd_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = ppsStdCount_;
      return *this;
    }

    VideoEncodeH264SessionParametersAddInfoEXT &
      setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT
    {
      pPpsStd = pPpsStd_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264SessionParametersAddInfoEXT &
      setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const StdVideoH264PictureParameterSet> const & ppsStd_ )
        VULKAN_HPP_NOEXCEPT
    {
      ppsStdCount = static_cast<uint32_t>( ppsStd_.size() );
      pPpsStd     = ppsStd_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersAddInfoEXT *>( this );
    }

    operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264SessionParametersAddInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264SessionParametersAddInfoEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) &&
             ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd );
    }

    bool operator!=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType      sType       = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT;
    const void *                             pNext       = {};
    uint32_t                                 spsStdCount = {};
    const StdVideoH264SequenceParameterSet * pSpsStd     = {};
    uint32_t                                 ppsStdCount = {};
    const StdVideoH264PictureParameterSet *  pPpsStd     = {};
  };
  static_assert( sizeof( VideoEncodeH264SessionParametersAddInfoEXT ) ==
                   sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeH264SessionParametersAddInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersAddInfoEXT>
  {
    using Type = VideoEncodeH264SessionParametersAddInfoEXT;
  };
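
  // Usage sketch (hand-written, not generated), assuming the default vk namespace: the
  // enhanced-mode ArrayProxy constructor pairs each count with its pointer automatically.
  // spsList and ppsList are hypothetical parameter sets prepared by the application.
  //
  //   std::array<StdVideoH264SequenceParameterSet, 1> spsList = {};  // fill with application-built SPS data
  //   std::array<StdVideoH264PictureParameterSet, 2>  ppsList = {};  // fill with application-built PPS data
  //   vk::VideoEncodeH264SessionParametersAddInfoEXT addInfo( spsList, ppsList );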
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264SessionParametersCreateInfoEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT(
      uint32_t                                                                 maxSpsStdCount_     = {},
      uint32_t                                                                 maxPpsStdCount_     = {},
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {} )
      VULKAN_HPP_NOEXCEPT
      : maxSpsStdCount( maxSpsStdCount_ )
      , maxPpsStdCount( maxPpsStdCount_ )
      , pParametersAddInfo( pParametersAddInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersCreateInfoEXT(
      VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264SessionParametersCreateInfoEXT( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs )
      VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264SessionParametersCreateInfoEXT(
          *reinterpret_cast<VideoEncodeH264SessionParametersCreateInfoEXT const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT &
                            operator=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264SessionParametersCreateInfoEXT &
      operator=( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT const *>( &rhs );
      return *this;
    }

    VideoEncodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeH264SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxSpsStdCount = maxSpsStdCount_;
      return *this;
    }

    VideoEncodeH264SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT
    {
      maxPpsStdCount = maxPpsStdCount_;
      return *this;
    }

    VideoEncodeH264SessionParametersCreateInfoEXT & setPParametersAddInfo(
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pParametersAddInfo = pParametersAddInfo_;
      return *this;
    }

    operator VkVideoEncodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264SessionParametersCreateInfoEXT *>( this );
    }

    operator VkVideoEncodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264SessionParametersCreateInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264SessionParametersCreateInfoEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) &&
             ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo );
    }

    bool operator!=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType          = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT;
    const void *                        pNext          = {};
    uint32_t                            maxSpsStdCount = {};
    uint32_t                            maxPpsStdCount = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {};
  };
  static_assert( sizeof( VideoEncodeH264SessionParametersCreateInfoEXT ) ==
                   sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeH264SessionParametersCreateInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT>
  {
    using Type = VideoEncodeH264SessionParametersCreateInfoEXT;
  };
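
  // Usage sketch (hand-written, not generated), assuming the default vk namespace; addInfo is a
  // hypothetical vk::VideoEncodeH264SessionParametersAddInfoEXT built as sketched above. The max
  // counts bound how many parameter sets the session parameters object can hold.
  //
  //   vk::VideoEncodeH264SessionParametersCreateInfoEXT h264ParametersCreateInfo =
  //     vk::VideoEncodeH264SessionParametersCreateInfoEXT()
  //       .setMaxSpsStdCount( 1 )
  //       .setMaxPpsStdCount( 2 )
  //       .setPParametersAddInfo( &addInfo );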
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeH264VclFrameInfoEXT
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264VclFrameInfoEXT;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT(
      uint8_t                                                     refDefaultFinalList0EntryCount_ = {},
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_   = {},
      uint8_t                                                     refDefaultFinalList1EntryCount_ = {},
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_   = {},
      uint32_t                                                    naluSliceEntryCount_            = {},
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT *   pNaluSliceEntries_              = {},
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} ) VULKAN_HPP_NOEXCEPT
      : refDefaultFinalList0EntryCount( refDefaultFinalList0EntryCount_ )
      , pRefDefaultFinalList0Entries( pRefDefaultFinalList0Entries_ )
      , refDefaultFinalList1EntryCount( refDefaultFinalList1EntryCount_ )
      , pRefDefaultFinalList1Entries( pRefDefaultFinalList1Entries_ )
      , naluSliceEntryCount( naluSliceEntryCount_ )
      , pNaluSliceEntries( pNaluSliceEntries_ )
      , pCurrentPictureInfo( pCurrentPictureInfo_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoEncodeH264VclFrameInfoEXT( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264VclFrameInfoEXT( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeH264VclFrameInfoEXT( *reinterpret_cast<VideoEncodeH264VclFrameInfoEXT const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264VclFrameInfoEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
        refDefaultFinalList0Entries_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
        refDefaultFinalList1Entries_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT> const &
                                                                  naluSliceEntries_    = {},
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ = {} )
      : refDefaultFinalList0EntryCount( static_cast<uint8_t>( refDefaultFinalList0Entries_.size() ) )
      , pRefDefaultFinalList0Entries( refDefaultFinalList0Entries_.data() )
      , refDefaultFinalList1EntryCount( static_cast<uint8_t>( refDefaultFinalList1Entries_.size() ) )
      , pRefDefaultFinalList1Entries( refDefaultFinalList1Entries_.data() )
      , naluSliceEntryCount( static_cast<uint32_t>( naluSliceEntries_.size() ) )
      , pNaluSliceEntries( naluSliceEntries_.data() )
      , pCurrentPictureInfo( pCurrentPictureInfo_ )
    {}
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT &
                            operator=( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeH264VclFrameInfoEXT & operator=( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT const *>( &rhs );
      return *this;
    }

    VideoEncodeH264VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeH264VclFrameInfoEXT &
      setRefDefaultFinalList0EntryCount( uint8_t refDefaultFinalList0EntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      refDefaultFinalList0EntryCount = refDefaultFinalList0EntryCount_;
      return *this;
    }

    VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList0Entries(
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      pRefDefaultFinalList0Entries = pRefDefaultFinalList0Entries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList0Entries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
        refDefaultFinalList0Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      refDefaultFinalList0EntryCount = static_cast<uint8_t>( refDefaultFinalList0Entries_.size() );
      pRefDefaultFinalList0Entries   = refDefaultFinalList0Entries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH264VclFrameInfoEXT &
      setRefDefaultFinalList1EntryCount( uint8_t refDefaultFinalList1EntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      refDefaultFinalList1EntryCount = refDefaultFinalList1EntryCount_;
      return *this;
    }

    VideoEncodeH264VclFrameInfoEXT & setPRefDefaultFinalList1Entries(
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      pRefDefaultFinalList1Entries = pRefDefaultFinalList1Entries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264VclFrameInfoEXT & setRefDefaultFinalList1Entries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT> const &
        refDefaultFinalList1Entries_ ) VULKAN_HPP_NOEXCEPT
    {
      refDefaultFinalList1EntryCount = static_cast<uint8_t>( refDefaultFinalList1Entries_.size() );
      pRefDefaultFinalList1Entries   = refDefaultFinalList1Entries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT
    {
      naluSliceEntryCount = naluSliceEntryCount_;
      return *this;
    }

    VideoEncodeH264VclFrameInfoEXT & setPNaluSliceEntries(
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      pNaluSliceEntries = pNaluSliceEntries_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntries(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT> const &
        naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT
    {
      naluSliceEntryCount = static_cast<uint32_t>( naluSliceEntries_.size() );
      pNaluSliceEntries   = naluSliceEntries_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VideoEncodeH264VclFrameInfoEXT & setPCurrentPictureInfo(
      const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pCurrentPictureInfo = pCurrentPictureInfo_;
      return *this;
    }

    operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeH264VclFrameInfoEXT *>( this );
    }

    operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeH264VclFrameInfoEXT *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeH264VclFrameInfoEXT const & ) const = default;
#  else
    bool operator==( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( refDefaultFinalList0EntryCount == rhs.refDefaultFinalList0EntryCount ) &&
             ( pRefDefaultFinalList0Entries == rhs.pRefDefaultFinalList0Entries ) &&
             ( refDefaultFinalList1EntryCount == rhs.refDefaultFinalList1EntryCount ) &&
             ( pRefDefaultFinalList1Entries == rhs.pRefDefaultFinalList1Entries ) &&
             ( naluSliceEntryCount == rhs.naluSliceEntryCount ) && ( pNaluSliceEntries == rhs.pNaluSliceEntries ) &&
             ( pCurrentPictureInfo == rhs.pCurrentPictureInfo );
    }

    bool operator!=( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                         sType = StructureType::eVideoEncodeH264VclFrameInfoEXT;
    const void *                                                pNext = {};
    uint8_t                                                     refDefaultFinalList0EntryCount = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList0Entries   = {};
    uint8_t                                                     refDefaultFinalList1EntryCount = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pRefDefaultFinalList1Entries   = {};
    uint32_t                                                    naluSliceEntryCount            = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceEXT *   pNaluSliceEntries              = {};
    const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pCurrentPictureInfo            = {};
  };
  static_assert( sizeof( VideoEncodeH264VclFrameInfoEXT ) == sizeof( VkVideoEncodeH264VclFrameInfoEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeH264VclFrameInfoEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeH264VclFrameInfoEXT>
  {
    using Type = VideoEncodeH264VclFrameInfoEXT;
  };
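
  // Usage sketch (hand-written, not generated), assuming the default vk namespace: a frame encoded
  // as a single slice. currentPicture and slices are hypothetical caller-owned objects; the
  // enhanced-mode ArrayProxy setter keeps naluSliceEntryCount and pNaluSliceEntries in sync.
  //
  //   vk::VideoEncodeH264DpbSlotInfoEXT              currentPicture = {};  // info for the reconstructed picture
  //   std::array<vk::VideoEncodeH264NaluSliceEXT, 1> slices         = {};  // slice built as sketched above
  //   vk::VideoEncodeH264VclFrameInfoEXT frameInfo = vk::VideoEncodeH264VclFrameInfoEXT()
  //                                                    .setNaluSliceEntries( slices )
  //                                                    .setPCurrentPictureInfo( &currentPicture );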
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoEncodeRateControlInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR        flags_ = {},
                                     VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ =
                                       VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone,
                                     uint32_t averageBitrate_            = {},
                                     uint16_t peakToAverageBitrateRatio_ = {},
                                     uint16_t frameRateNumerator_        = {},
                                     uint16_t frameRateDenominator_      = {},
                                     uint32_t virtualBufferSizeInMs_     = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , rateControlMode( rateControlMode_ )
      , averageBitrate( averageBitrate_ )
      , peakToAverageBitrateRatio( peakToAverageBitrateRatio_ )
      , frameRateNumerator( frameRateNumerator_ )
      , frameRateDenominator( frameRateDenominator_ )
      , virtualBufferSizeInMs( virtualBufferSizeInMs_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoEncodeRateControlInfoKHR( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeRateControlInfoKHR( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoEncodeRateControlInfoKHR( *reinterpret_cast<VideoEncodeRateControlInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR &
                            operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR const *>( &rhs );
      return *this;
    }

    VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoEncodeRateControlInfoKHR &
      setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    VideoEncodeRateControlInfoKHR & setRateControlMode(
      VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT
    {
      rateControlMode = rateControlMode_;
      return *this;
    }

    VideoEncodeRateControlInfoKHR & setAverageBitrate( uint32_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
    {
      averageBitrate = averageBitrate_;
      return *this;
    }

    VideoEncodeRateControlInfoKHR &
      setPeakToAverageBitrateRatio( uint16_t peakToAverageBitrateRatio_ ) VULKAN_HPP_NOEXCEPT
    {
      peakToAverageBitrateRatio = peakToAverageBitrateRatio_;
      return *this;
    }

    VideoEncodeRateControlInfoKHR & setFrameRateNumerator( uint16_t frameRateNumerator_ ) VULKAN_HPP_NOEXCEPT
    {
      frameRateNumerator = frameRateNumerator_;
      return *this;
    }

    VideoEncodeRateControlInfoKHR & setFrameRateDenominator( uint16_t frameRateDenominator_ ) VULKAN_HPP_NOEXCEPT
    {
      frameRateDenominator = frameRateDenominator_;
      return *this;
    }

    VideoEncodeRateControlInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
    {
      virtualBufferSizeInMs = virtualBufferSizeInMs_;
      return *this;
    }

    operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoEncodeRateControlInfoKHR *>( this );
    }

    operator VkVideoEncodeRateControlInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoEncodeRateControlInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default;
#  else
    bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( rateControlMode == rhs.rateControlMode ) && ( averageBitrate == rhs.averageBitrate ) &&
             ( peakToAverageBitrateRatio == rhs.peakToAverageBitrateRatio ) &&
             ( frameRateNumerator == rhs.frameRateNumerator ) && ( frameRateDenominator == rhs.frameRateDenominator ) &&
             ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs );
    }

    bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                         sType = StructureType::eVideoEncodeRateControlInfoKHR;
    const void *                                                pNext = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR        flags = {};
    VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode =
      VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone;
    uint32_t averageBitrate            = {};
    uint16_t peakToAverageBitrateRatio = {};
    uint16_t frameRateNumerator        = {};
    uint16_t frameRateDenominator      = {};
    uint32_t virtualBufferSizeInMs     = {};
  };
  static_assert( sizeof( VideoEncodeRateControlInfoKHR ) == sizeof( VkVideoEncodeRateControlInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoEncodeRateControlInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoEncodeRateControlInfoKHR>
  {
    using Type = VideoEncodeRateControlInfoKHR;
  };
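
  // Usage sketch (hand-written, not generated), assuming the default vk namespace: constant-bitrate
  // control at 5 Mbit/s and 30 fps. The eCbr mode bit and all numeric values are illustrative
  // assumptions, not recommendations.
  //
  //   vk::VideoEncodeRateControlInfoKHR rateControlInfo =
  //     vk::VideoEncodeRateControlInfoKHR()
  //       .setRateControlMode( vk::VideoEncodeRateControlModeFlagBitsKHR::eCbr )
  //       .setAverageBitrate( 5000000 )
  //       .setFrameRateNumerator( 30 )
  //       .setFrameRateDenominator( 1 )
  //       .setVirtualBufferSizeInMs( 1000 );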
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  struct VideoQueueFamilyProperties2KHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoQueueFamilyProperties2KHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR VideoQueueFamilyProperties2KHR(
      VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {} ) VULKAN_HPP_NOEXCEPT
      : videoCodecOperations( videoCodecOperations_ )
    {}

    VULKAN_HPP_CONSTEXPR
      VideoQueueFamilyProperties2KHR( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoQueueFamilyProperties2KHR( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : VideoQueueFamilyProperties2KHR( *reinterpret_cast<VideoQueueFamilyProperties2KHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 VideoQueueFamilyProperties2KHR &
                            operator=( VideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    VideoQueueFamilyProperties2KHR & operator=( VkVideoQueueFamilyProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoQueueFamilyProperties2KHR const *>( &rhs );
      return *this;
    }

    VideoQueueFamilyProperties2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VideoQueueFamilyProperties2KHR & setVideoCodecOperations(
      VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ ) VULKAN_HPP_NOEXCEPT
    {
      videoCodecOperations = videoCodecOperations_;
      return *this;
    }

    operator VkVideoQueueFamilyProperties2KHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkVideoQueueFamilyProperties2KHR *>( this );
    }

    operator VkVideoQueueFamilyProperties2KHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkVideoQueueFamilyProperties2KHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( VideoQueueFamilyProperties2KHR const & ) const = default;
#  else
    bool operator==( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations );
    }

    bool operator!=( VideoQueueFamilyProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType               sType = StructureType::eVideoQueueFamilyProperties2KHR;
    void *                                            pNext = {};
    VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {};
  };
  static_assert( sizeof( VideoQueueFamilyProperties2KHR ) == sizeof( VkVideoQueueFamilyProperties2KHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<VideoQueueFamilyProperties2KHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eVideoQueueFamilyProperties2KHR>
  {
    using Type = VideoQueueFamilyProperties2KHR;
  };
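
  // Usage sketch (hand-written, not generated), assuming the default vk namespace: this is an
  // output structure, normally chained into the pNext of vk::QueueFamilyProperties2 before the
  // queue-family query, then inspected for the codec operations the family supports.
  //
  //   vk::VideoQueueFamilyProperties2KHR videoProps;
  //   vk::QueueFamilyProperties2         props;
  //   props.pNext = &videoProps;
  //   // ... query via PhysicalDevice::getQueueFamilyProperties2, then test videoProps.videoCodecOperations.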
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  struct WaylandSurfaceCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_   = {},
                                                      struct wl_display *                                display_ = {},
                                                      struct wl_surface * surface_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , display( display_ )
      , surface( surface_ )
    {}

    VULKAN_HPP_CONSTEXPR
      WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR &
                            operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

    WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    WaylandSurfaceCreateInfoKHR &
      setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display * display_ ) VULKAN_HPP_NOEXCEPT
    {
      display = display_;
      return *this;
    }

    WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface * surface_ ) VULKAN_HPP_NOEXCEPT
    {
      surface = surface_;
      return *this;
    }

    operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( this );
    }

    operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WaylandSurfaceCreateInfoKHR const & ) const = default;
#  else
    bool operator==( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( display == rhs.display ) &&
             ( surface == rhs.surface );
    }

    bool operator!=( WaylandSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType                sType   = StructureType::eWaylandSurfaceCreateInfoKHR;
    const void *                                       pNext   = {};
    VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags   = {};
    struct wl_display *                                display = {};
    struct wl_surface *                                surface = {};
  };
  static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<WaylandSurfaceCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
  {
    using Type = WaylandSurfaceCreateInfoKHR;
  };
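
  // Usage sketch (hand-written, not generated), assuming the default vk namespace, the default
  // dispatcher, and enabled exceptions; instance is a hypothetical vk::Instance, and display /
  // surface are wl_display / wl_surface handles obtained from the Wayland client library.
  //
  //   vk::WaylandSurfaceCreateInfoKHR surfaceCreateInfo =
  //     vk::WaylandSurfaceCreateInfoKHR().setDisplay( display ).setSurface( surface );
  //   vk::SurfaceKHR vulkanSurface = instance.createWaylandSurfaceKHR( surfaceCreateInfo );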
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct Win32KeyedMutexAcquireReleaseInfoKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t                                   acquireCount_     = {},
                                            const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_    = {},
                                            const uint64_t *                           pAcquireKeys_     = {},
                                            const uint32_t *                           pAcquireTimeouts_ = {},
                                            uint32_t                                   releaseCount_     = {},
                                            const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_    = {},
                                            const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
      : acquireCount( acquireCount_ )
      , pAcquireSyncs( pAcquireSyncs_ )
      , pAcquireKeys( pAcquireKeys_ )
      , pAcquireTimeouts( pAcquireTimeouts_ )
      , releaseCount( releaseCount_ )
      , pReleaseSyncs( pReleaseSyncs_ )
      , pReleaseKeys( pReleaseKeys_ )
    {}

    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoKHR(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const &                           acquireKeys_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_                       = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const &
                                                                            releaseSyncs_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_  = {} )
      : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
      , pAcquireSyncs( acquireSyncs_.data() )
      , pAcquireKeys( acquireKeys_.data() )
      , pAcquireTimeouts( acquireTimeouts_.data() )
      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
      , pReleaseSyncs( releaseSyncs_.data() )
      , pReleaseKeys( releaseKeys_.data() )
    {
#      ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
#      else
      if ( acquireSyncs_.size() != acquireKeys_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
      }
      if ( acquireSyncs_.size() != acquireTimeouts_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
      }
      if ( acquireKeys_.size() != acquireTimeouts_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
      }
#      endif /*VULKAN_HPP_NO_EXCEPTIONS*/

#      ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
#      else
      if ( releaseSyncs_.size() != releaseKeys_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
      }
#      endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR &
                            operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Win32KeyedMutexAcquireReleaseInfoKHR &
      operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
      return *this;
    }

    Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount = acquireCount_;
      return *this;
    }

    Win32KeyedMutexAcquireReleaseInfoKHR &
      setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireSyncs = pAcquireSyncs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ )
      VULKAN_HPP_NOEXCEPT
    {
      acquireCount  = static_cast<uint32_t>( acquireSyncs_.size() );
      pAcquireSyncs = acquireSyncs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireKeys = pAcquireKeys_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
      pAcquireKeys = acquireKeys_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t * pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireTimeouts = pAcquireTimeouts_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount     = static_cast<uint32_t>( acquireTimeouts_.size() );
      pAcquireTimeouts = acquireTimeouts_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount = releaseCount_;
      return *this;
    }

    Win32KeyedMutexAcquireReleaseInfoKHR &
      setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      pReleaseSyncs = pReleaseSyncs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ )
      VULKAN_HPP_NOEXCEPT
    {
      releaseCount  = static_cast<uint32_t>( releaseSyncs_.size() );
      pReleaseSyncs = releaseSyncs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      pReleaseKeys = pReleaseKeys_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
      pReleaseKeys = releaseKeys_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
    }

    operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const & ) const = default;
#  else
    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) &&
             ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) &&
             ( pAcquireTimeouts == rhs.pAcquireTimeouts ) && ( releaseCount == rhs.releaseCount ) &&
             ( pReleaseSyncs == rhs.pReleaseSyncs ) && ( pReleaseKeys == rhs.pReleaseKeys );
    }

    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType            = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
    const void *                               pNext            = {};
    uint32_t                                   acquireCount     = {};
    const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs    = {};
    const uint64_t *                           pAcquireKeys     = {};
    const uint32_t *                           pAcquireTimeouts = {};
    uint32_t                                   releaseCount     = {};
    const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs    = {};
    const uint64_t *                           pReleaseKeys     = {};
  };
  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
  {
    using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
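
  // Editorial usage sketch (not generated from the registry): when enhanced mode is enabled,
  // the ArrayProxyNoTemporaries constructor of Win32KeyedMutexAcquireReleaseInfoKHR above
  // derives acquireCount/releaseCount from the proxies and validates that the acquire arrays
  // have matching sizes.  Assuming importedMemoryA/importedMemoryB are DeviceMemory objects
  // imported from keyed-mutex-protected D3D resources (VK_KHR_win32_keyed_mutex):
  //
  //   std::array<vk::DeviceMemory, 2> syncs    = { importedMemoryA, importedMemoryB };
  //   std::array<uint64_t, 2>         keys     = { 0, 1 };
  //   std::array<uint32_t, 2>         timeouts = { 1000, 1000 };
  //   vk::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutexInfo( syncs, keys, timeouts );
  //   vk::SubmitInfo submitInfo;
  //   submitInfo.setPNext( &keyedMutexInfo );  // chained into the queue submission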

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct Win32KeyedMutexAcquireReleaseInfoNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
      Win32KeyedMutexAcquireReleaseInfoNV( uint32_t                                   acquireCount_                = {},
                                           const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_               = {},
                                           const uint64_t *                           pAcquireKeys_                = {},
                                           const uint32_t *                           pAcquireTimeoutMilliseconds_ = {},
                                           uint32_t                                   releaseCount_                = {},
                                           const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_               = {},
                                           const uint64_t * pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
      : acquireCount( acquireCount_ )
      , pAcquireSyncs( pAcquireSyncs_ )
      , pAcquireKeys( pAcquireKeys_ )
      , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
      , releaseCount( releaseCount_ )
      , pReleaseSyncs( pReleaseSyncs_ )
      , pReleaseKeys( pReleaseKeys_ )
    {}

    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs ) )
    {}

#    if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoNV(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_,
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const &                           acquireKeys_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_            = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const &
                                                                            releaseSyncs_ = {},
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_  = {} )
      : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) )
      , pAcquireSyncs( acquireSyncs_.data() )
      , pAcquireKeys( acquireKeys_.data() )
      , pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() )
      , releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) )
      , pReleaseSyncs( releaseSyncs_.data() )
      , pReleaseKeys( releaseKeys_.data() )
    {
#      ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
      VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
      VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
#      else
      if ( acquireSyncs_.size() != acquireKeys_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
      }
      if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
      }
      if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
      }
#      endif /*VULKAN_HPP_NO_EXCEPTIONS*/

#      ifdef VULKAN_HPP_NO_EXCEPTIONS
      VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
#      else
      if ( releaseSyncs_.size() != releaseKeys_.size() )
      {
        throw LogicError(
          VULKAN_HPP_NAMESPACE_STRING
          "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
      }
#      endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    }
#    endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#  endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV &
                            operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Win32KeyedMutexAcquireReleaseInfoNV &
      operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
      return *this;
    }

    Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount = acquireCount_;
      return *this;
    }

    Win32KeyedMutexAcquireReleaseInfoNV &
      setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireSyncs = pAcquireSyncs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ )
      VULKAN_HPP_NOEXCEPT
    {
      acquireCount  = static_cast<uint32_t>( acquireSyncs_.size() );
      pAcquireSyncs = acquireSyncs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t * pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireKeys = pAcquireKeys_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
      pAcquireKeys = acquireKeys_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoNV &
      setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
    {
      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ )
      VULKAN_HPP_NOEXCEPT
    {
      acquireCount                = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
      pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount = releaseCount_;
      return *this;
    }

    Win32KeyedMutexAcquireReleaseInfoNV &
      setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
    {
      pReleaseSyncs = pReleaseSyncs_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ )
      VULKAN_HPP_NOEXCEPT
    {
      releaseCount  = static_cast<uint32_t>( releaseSyncs_.size() );
      pReleaseSyncs = releaseSyncs_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t * pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      pReleaseKeys = pReleaseKeys_;
      return *this;
    }

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
    {
      releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
      pReleaseKeys = releaseKeys_.data();
      return *this;
    }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
    }

    operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const & ) const = default;
#  else
    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireCount == rhs.acquireCount ) &&
             ( pAcquireSyncs == rhs.pAcquireSyncs ) && ( pAcquireKeys == rhs.pAcquireKeys ) &&
             ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds ) &&
             ( releaseCount == rhs.releaseCount ) && ( pReleaseSyncs == rhs.pReleaseSyncs ) &&
             ( pReleaseKeys == rhs.pReleaseKeys );
    }

    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType        sType         = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
    const void *                               pNext         = {};
    uint32_t                                   acquireCount  = {};
    const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs = {};
    const uint64_t *                           pAcquireKeys  = {};
    const uint32_t *                           pAcquireTimeoutMilliseconds = {};
    uint32_t                                   releaseCount                = {};
    const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs               = {};
    const uint64_t *                           pReleaseKeys                = {};
  };
  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
  {
    using Type = Win32KeyedMutexAcquireReleaseInfoNV;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
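
  // Editorial note: Win32KeyedMutexAcquireReleaseInfoNV above is the VK_NV_win32_keyed_mutex
  // variant of the KHR structure; layout and enhanced-mode constructor match, with the timeout
  // member named pAcquireTimeoutMilliseconds instead of pAcquireTimeouts.  Usage mirrors the
  // KHR sketch shown earlier.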

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  struct Win32SurfaceCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_     = {},
                                                    HINSTANCE                                        hinstance_ = {},
                                                    HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , hinstance( hinstance_ )
      , hwnd( hwnd_ )
    {}

    VULKAN_HPP_CONSTEXPR
      Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : Win32SurfaceCreateInfoKHR( *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR &
                            operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

    Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
    {
      hinstance = hinstance_;
      return *this;
    }

    Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
    {
      hwnd = hwnd_;
      return *this;
    }

    operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( this );
    }

    operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Win32SurfaceCreateInfoKHR const & ) const = default;
#  else
    bool operator==( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( hinstance == rhs.hinstance ) && ( hwnd == rhs.hwnd );
    }

    bool operator!=( Win32SurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType              sType     = StructureType::eWin32SurfaceCreateInfoKHR;
    const void *                                     pNext     = {};
    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags     = {};
    HINSTANCE                                        hinstance = {};
    HWND                                             hwnd      = {};
  };
  static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<Win32SurfaceCreateInfoKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
  {
    using Type = Win32SurfaceCreateInfoKHR;
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
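
  // Editorial usage sketch (not generated from the registry): like all create-info wrappers in
  // this header, Win32SurfaceCreateInfoKHR supports chained setters.  Assuming hInstance and
  // hWnd come from the application's Win32 code and instance is a vk::Instance:
  //
  //   auto surfaceCreateInfo = vk::Win32SurfaceCreateInfoKHR{}.setHinstance( hInstance ).setHwnd( hWnd );
  //   vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceCreateInfo );
  //
  // createWin32SurfaceKHR is the enhanced-mode member of vk::Instance declared later in this
  // header under the same platform guard; in non-enhanced mode the pointer overload is used.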

  struct WriteDescriptorSetAccelerationStructureKHR
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eWriteDescriptorSetAccelerationStructureKHR;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(
      uint32_t                                               accelerationStructureCount_ = {},
      const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_    = {} ) VULKAN_HPP_NOEXCEPT
      : accelerationStructureCount( accelerationStructureCount_ )
      , pAccelerationStructures( pAccelerationStructures_ )
    {}

    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(
      WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs )
      VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSetAccelerationStructureKHR(
          *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSetAccelerationStructureKHR(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &
        accelerationStructures_ )
      : accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
      , pAccelerationStructures( accelerationStructures_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR &
                            operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSetAccelerationStructureKHR &
      operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
      return *this;
    }

    WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    WriteDescriptorSetAccelerationStructureKHR &
      setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = accelerationStructureCount_;
      return *this;
    }

    WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures(
      const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      pAccelerationStructures = pAccelerationStructures_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &
        accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
      pAccelerationStructures    = accelerationStructures_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR *>( this );
    }

    operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const & ) const = default;
#else
    bool operator==( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
             ( pAccelerationStructures == rhs.pAccelerationStructures );
    }

    bool operator!=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
    const void *                        pNext = {};
    uint32_t                            accelerationStructureCount                 = {};
    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures = {};
  };
  static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) ==
                   sizeof( VkWriteDescriptorSetAccelerationStructureKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureKHR>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
  {
    using Type = WriteDescriptorSetAccelerationStructureKHR;
  };
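
  // Editorial usage sketch (not generated from the registry): this structure extends
  // vk::WriteDescriptorSet through its pNext chain when writing a descriptor of type
  // eAccelerationStructureKHR; descriptorCount must equal accelerationStructureCount.
  // Assuming tlas is a vk::AccelerationStructureKHR and descriptorSet a vk::DescriptorSet:
  //
  //   vk::WriteDescriptorSetAccelerationStructureKHR asInfo( tlas );
  //   vk::WriteDescriptorSet write;
  //   write.setDstSet( descriptorSet )
  //        .setDstBinding( 0 )
  //        .setDescriptorCount( 1 )
  //        .setDescriptorType( vk::DescriptorType::eAccelerationStructureKHR )
  //        .setPNext( &asInfo );
  //   device.updateDescriptorSets( write, nullptr );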

  struct WriteDescriptorSetAccelerationStructureNV
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eWriteDescriptorSetAccelerationStructureNV;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(
      uint32_t                                              accelerationStructureCount_ = {},
      const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_    = {} ) VULKAN_HPP_NOEXCEPT
      : accelerationStructureCount( accelerationStructureCount_ )
      , pAccelerationStructures( pAccelerationStructures_ )
    {}

    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(
      WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs )
      VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSetAccelerationStructureNV(
          *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSetAccelerationStructureNV(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const &
        accelerationStructures_ )
      : accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) )
      , pAccelerationStructures( accelerationStructures_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV &
                            operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSetAccelerationStructureNV &
      operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>( &rhs );
      return *this;
    }

    WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    WriteDescriptorSetAccelerationStructureNV &
      setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = accelerationStructureCount_;
      return *this;
    }

    WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures(
      const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      pAccelerationStructures = pAccelerationStructures_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures(
      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const &
        accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
    {
      accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
      pAccelerationStructures    = accelerationStructures_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV *>( this );
    }

    operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSetAccelerationStructureNV const & ) const = default;
#else
    bool operator==( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
             ( accelerationStructureCount == rhs.accelerationStructureCount ) &&
             ( pAccelerationStructures == rhs.pAccelerationStructures );
    }

    bool operator!=( WriteDescriptorSetAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
    const void *                        pNext = {};
    uint32_t                            accelerationStructureCount                = {};
    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures = {};
  };
  static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) ==
                   sizeof( VkWriteDescriptorSetAccelerationStructureNV ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureNV>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureNV>
  {
    using Type = WriteDescriptorSetAccelerationStructureNV;
  };
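
  // Editorial note: WriteDescriptorSetAccelerationStructureNV above is the VK_NV_ray_tracing
  // counterpart of the KHR structure; it is chained into a vk::WriteDescriptorSet in the same
  // way, but with descriptorType eAccelerationStructureNV and AccelerationStructureNV handles.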

  struct WriteDescriptorSetInlineUniformBlockEXT
  {
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType =
      StructureType::eWriteDescriptorSetInlineUniformBlockEXT;

#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t     dataSize_ = {},
                                                                  const void * pData_    = {} ) VULKAN_HPP_NOEXCEPT
      : dataSize( dataSize_ )
      , pData( pData_ )
    {}

    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( WriteDescriptorSetInlineUniformBlockEXT const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : WriteDescriptorSetInlineUniformBlockEXT(
          *reinterpret_cast<WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs ) )
    {}

#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    WriteDescriptorSetInlineUniformBlockEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ )
      : dataSize( static_cast<uint32_t>( data_.size() * sizeof( T ) ) ), pData( data_.data() )
    {}
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlockEXT &
                            operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    WriteDescriptorSetInlineUniformBlockEXT &
      operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs );
      return *this;
    }

    WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    WriteDescriptorSetInlineUniformBlockEXT & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = dataSize_;
      return *this;
    }

    WriteDescriptorSetInlineUniformBlockEXT & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT
    {
      pData = pData_;
      return *this;
    }

#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    template <typename T>
    WriteDescriptorSetInlineUniformBlockEXT &
      setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
    {
      dataSize = static_cast<uint32_t>( data_.size() * sizeof( T ) );
      pData    = data_.data();
      return *this;
    }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    operator VkWriteDescriptorSetInlineUniformBlockEXT const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT *>( this );
    }

    operator VkWriteDescriptorSetInlineUniformBlockEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT *>( this );
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const & ) const = default;
#else
    bool operator==( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData );
    }

    bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType sType    = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
    const void *                        pNext    = {};
    uint32_t                            dataSize = {};
    const void *                        pData    = {};
  };
  static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) ==
                   sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<WriteDescriptorSetInlineUniformBlockEXT>::value,
                 "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlockEXT>
  {
    using Type = WriteDescriptorSetInlineUniformBlockEXT;
  };
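
  // Editorial usage sketch (not generated from the registry): for inline uniform blocks,
  // dataSize and the descriptor count of the enclosing vk::WriteDescriptorSet are both
  // expressed in bytes.  Assuming Params is a small POD parameter block defined by the
  // application and descriptorSet a vk::DescriptorSet:
  //
  //   Params params{};
  //   vk::WriteDescriptorSetInlineUniformBlockEXT inlineData;
  //   inlineData.setDataSize( sizeof( Params ) ).setPData( &params );
  //   vk::WriteDescriptorSet write;
  //   write.setDstSet( descriptorSet )
  //        .setDescriptorType( vk::DescriptorType::eInlineUniformBlockEXT )
  //        .setDescriptorCount( sizeof( Params ) )  // byte count for inline uniform blocks
  //        .setPNext( &inlineData );
  //   device.updateDescriptorSets( write, nullptr );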

#if defined( VK_USE_PLATFORM_XCB_KHR )
  struct XcbSurfaceCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_      = {},
                                                  xcb_connection_t *                             connection_ = {},
                                                  xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , connection( connection_ )
      , window( window_ )
    {}

    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : XcbSurfaceCreateInfoKHR( *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR &
                            operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

    XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t * connection_ ) VULKAN_HPP_NOEXCEPT
    {
      connection = connection_;
      return *this;
    }

    XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }

    operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( this );
    }

    operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( XcbSurfaceCreateInfoKHR const & ) const = default;
#  else
    bool operator==( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) &&
             ( connection == rhs.connection ) && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
    }

    bool operator!=( XcbSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType            sType      = StructureType::eXcbSurfaceCreateInfoKHR;
    const void *                                   pNext      = {};
    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags      = {};
    xcb_connection_t *                             connection = {};
    xcb_window_t                                   window     = {};
  };
  static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
  {
    using Type = XcbSurfaceCreateInfoKHR;
  };
#endif /*VK_USE_PLATFORM_XCB_KHR*/
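
  // Editorial usage sketch (not generated from the registry): creating an XCB surface from an
  // existing connection and window, assumed to come from the application's XCB setup:
  //
  //   vk::XcbSurfaceCreateInfoKHR createInfo( {}, connection, window );
  //   vk::SurfaceKHR surface = instance.createXcbSurfaceKHR( createInfo );
  //
  // createXcbSurfaceKHR is declared with the other VK_KHR_xcb_surface members of vk::Instance
  // later in this header.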

#if defined( VK_USE_PLATFORM_XLIB_KHR )
  struct XlibSurfaceCreateInfoKHR
  {
    static const bool                    allowDuplicate              = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR;

#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {},
                                                   Display *                                       dpy_   = {},
                                                   Window window_ = {} ) VULKAN_HPP_NOEXCEPT
      : flags( flags_ )
      , dpy( dpy_ )
      , window( window_ )
    {}

    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : XlibSurfaceCreateInfoKHR( *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs ) )
    {}
#  endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

    VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR &
                            operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>( &rhs );
      return *this;
    }

    XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }

    XlibSurfaceCreateInfoKHR & setDpy( Display * dpy_ ) VULKAN_HPP_NOEXCEPT
    {
      dpy = dpy_;
      return *this;
    }

    XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT
    {
      window = window_;
      return *this;
    }

    operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( this );
    }

    operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR *>( this );
    }

#  if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( XlibSurfaceCreateInfoKHR const & ) const = default;
#  else
    bool operator==( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dpy == rhs.dpy ) &&
             ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
    }

    bool operator!=( XlibSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#  endif

  public:
    VULKAN_HPP_NAMESPACE::StructureType             sType  = StructureType::eXlibSurfaceCreateInfoKHR;
    const void *                                    pNext  = {};
    VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags  = {};
    Display *                                       dpy    = {};
    Window                                          window = {};
  };
  static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ),
                 "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );

  template <>
  struct CppType<StructureType, StructureType::eXlibSurfaceCreateInfoKHR>
  {
    using Type = XlibSurfaceCreateInfoKHR;
  };
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
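
  // Editorial usage sketch (not generated from the registry): the Xlib path is analogous,
  // with display and window assumed to come from XOpenDisplay / XCreateWindow:
  //
  //   vk::XlibSurfaceCreateInfoKHR createInfo( {}, display, window );
  //   vk::SurfaceKHR surface = instance.createXlibSurfaceKHR( createInfo );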

  class DebugReportCallbackEXT
  {
  public:
    using CType = VkDebugReportCallbackEXT;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;

  public:
    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default;
    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT
      DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
      : m_debugReportCallbackEXT( debugReportCallbackEXT )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
    {
      m_debugReportCallbackEXT = debugReportCallbackEXT;
      return *this;
    }
#endif

    DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_debugReportCallbackEXT = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugReportCallbackEXT const & ) const = default;
#else
    bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
    }

    bool operator!=( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
    }

    bool operator<( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
    {
      return m_debugReportCallbackEXT;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_debugReportCallbackEXT != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_debugReportCallbackEXT == VK_NULL_HANDLE;
    }

  private:
    VkDebugReportCallbackEXT m_debugReportCallbackEXT = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDebugReportCallbackEXT>
  {
    using type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT>
  {
    using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT>
  {
    using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };
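
  // Editorial usage sketch (not generated from the registry): like every handle wrapper in
  // this header, DebugReportCallbackEXT is a thin value type around the C handle; it can be
  // tested for null, reset with nullptr, and converted back to the C handle:
  //
  //   vk::DebugReportCallbackEXT callback;  // default-constructed, equal to VK_NULL_HANDLE
  //   if ( !callback ) { /* not created yet */ }
  //   callback = nullptr;                   // reset to the null handle
  //   VkDebugReportCallbackEXT cHandle = static_cast<VkDebugReportCallbackEXT>( callback );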

  class DebugUtilsMessengerEXT
  {
  public:
    using CType = VkDebugUtilsMessengerEXT;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

  public:
    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default;
    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT
      DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
      : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
    {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
    {
      m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
      return *this;
    }
#endif

    DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_debugUtilsMessengerEXT = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( DebugUtilsMessengerEXT const & ) const = default;
#else
    bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
    }

    bool operator!=( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
    }

    bool operator<( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
    }
#endif

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
    {
      return m_debugUtilsMessengerEXT;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
    }

  private:
    VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED(
    "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eDebugUtilsMessengerEXT>
  {
    using type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT>
  {
    using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

#ifndef VULKAN_HPP_NO_SMART_HANDLE
  class Instance;
  template <typename Dispatch>
  class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Instance, Dispatch>;
  };
  using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<DebugUtilsMessengerEXT, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Instance, Dispatch>;
  };
  using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  template <typename Dispatch>
  class UniqueHandleTraits<SurfaceKHR, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<Instance, Dispatch>;
  };
  using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
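
  // Editorial usage sketch (not generated from the registry): the Unique* aliases above pair a
  // handle with an ObjectDestroy deleter that keeps the owning Instance, so the object is
  // destroyed automatically when the UniqueHandle goes out of scope.  Assuming
  // messengerCreateInfo is a filled-in vk::DebugUtilsMessengerCreateInfoEXT:
  //
  //   {
  //     vk::UniqueDebugUtilsMessengerEXT messenger =
  //       instance.createDebugUtilsMessengerEXTUnique( messengerCreateInfo );
  //   }  // ~UniqueHandle destroys the messenger through the owning instance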

  class Instance
  {
  public:
    using CType = VkInstance;

    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType =
      VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;

  public:
    VULKAN_HPP_CONSTEXPR         Instance() = default;
    VULKAN_HPP_CONSTEXPR         Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
    VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {}

#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
    Instance & operator=( VkInstance instance ) VULKAN_HPP_NOEXCEPT
    {
      m_instance = instance;
      return *this;
    }
#endif

    Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    {
      m_instance = {};
      return *this;
    }

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( Instance const & ) const = default;
#else
    bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_instance == rhs.m_instance;
    }

    bool operator!=( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_instance != rhs.m_instance;
    }

    bool operator<( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return m_instance < rhs.m_instance;
    }
#endif

    //=== VK_VERSION_1_0 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         enumeratePhysicalDevices( uint32_t *                             pPhysicalDeviceCount,
                                                   VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
      enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>,
              typename Dispatch                = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                       = PhysicalDeviceAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type = 0>
    VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
      enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
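
    // Editorial note (not generated from the registry): in enhanced mode the overload above
    // performs the usual two-call enumeration internally and returns the result directly,
    //
    //   std::vector<vk::PhysicalDevice> devices = instance.enumeratePhysicalDevices();
    //
    // while the pointer overload mirrors vkEnumeratePhysicalDevices and leaves the
    // count/query loop to the caller.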

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    PFN_vkVoidFunction
      getProcAddr( const char *     pName,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    PFN_vkVoidFunction
      getProcAddr( const std::string & name,
                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    //=== VK_VERSION_1_1 ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups(
      uint32_t *                                            pPhysicalDeviceGroupCount,
      VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
              typename Dispatch                               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
      enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
              typename Dispatch                               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                                      = PhysicalDeviceGroupPropertiesAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value,
                                      int>::type              = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
      enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
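    // Usage sketch (illustrative only): device groups are enumerated like physical devices; each
    // PhysicalDeviceGroupProperties lists the devices that can be used as one logical device.
    //
    //   auto groups = instance.enumeratePhysicalDeviceGroups();  // requires a Vulkan 1.1 instance
    //   for ( auto const & group : groups ) { /* group.physicalDeviceCount, group.physicalDevices */ }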

    //=== VK_KHR_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR                  surface,
                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void
      destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR                  surface,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR    surface,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
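    // Usage sketch (illustrative only): both the named and the generic overload forward to
    // vkDestroySurfaceKHR; pass allocation callbacks only if they were used at creation time.
    //
    //   instance.destroySurfaceKHR( surface );   // `surface` assumed created from this instance
    //   // or equivalently: instance.destroy( surface );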

    //=== VK_KHR_display ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR(
      const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
      VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo,
                                    Optional<const AllocationCallbacks> allocator
                                                                        VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo,
                                          Optional<const AllocationCallbacks> allocator
                                                                              VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
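    // Usage sketch (illustrative only): a display-plane surface is created from a filled
    // DisplaySurfaceCreateInfoKHR; `displayMode` (e.g. from getDisplayModePropertiesKHR) and the
    // chosen extent are assumptions of this sketch.
    //
    //   vk::DisplaySurfaceCreateInfoKHR createInfo;
    //   createInfo.displayMode = displayMode;
    //   createInfo.imageExtent = vk::Extent2D( 1920, 1080 );
    //   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( createInfo );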

#if defined( VK_USE_PLATFORM_XLIB_KHR )
    //=== VK_KHR_xlib_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                               VULKAN_HPP_NAMESPACE::SurfaceKHR *                     pSurface,
                                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR &    createInfo,
                            Optional<const AllocationCallbacks> allocator
                                                                VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR &    createInfo,
                                  Optional<const AllocationCallbacks> allocator
                                                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_XLIB_KHR*/

#if defined( VK_USE_PLATFORM_XCB_KHR )
    //=== VK_KHR_xcb_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                              VULKAN_HPP_NAMESPACE::SurfaceKHR *                    pSurface,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR &     createInfo,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR &     createInfo,
                                 Optional<const AllocationCallbacks> allocator
                                                                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_XCB_KHR*/

#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
    //=== VK_KHR_wayland_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                                  VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo,
                               Optional<const AllocationCallbacks> allocator
                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo,
                                     Optional<const AllocationCallbacks> allocator
                                                                         VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_WAYLAND_KHR*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    //=== VK_KHR_android_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                                  VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo,
                               Optional<const AllocationCallbacks> allocator
                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo,
                                     Optional<const AllocationCallbacks> allocator
                                                                         VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_KHR_win32_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                                VULKAN_HPP_NAMESPACE::SurfaceKHR *                      pSurface,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR &   createInfo,
                             Optional<const AllocationCallbacks> allocator
                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR &   createInfo,
                                   Optional<const AllocationCallbacks> allocator
                                                                       VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_WIN32_KHR*/
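    // Usage sketch (illustrative only): the platform-specific create*SurfaceKHR entry points above
    // and below all follow the same pattern, shown here for Win32; `hinstance` and `hwnd` are
    // assumed to come from the application's windowing code.
    //
    //   vk::Win32SurfaceCreateInfoKHR createInfo( {}, hinstance, hwnd );
    //   vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( createInfo );
    //   // or: vk::UniqueSurfaceKHR unique = instance.createWin32SurfaceKHRUnique( createInfo );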

    //=== VK_EXT_debug_report ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT(
      const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
      VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT *                 pCallback,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT(
      const DebugReportCallbackCreateInfoEXT & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
      createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo,
                                          Optional<const AllocationCallbacks>      allocator
                                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT      callback,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDebugReportCallbackEXT(
      VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT      callback,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT      flags,
                                VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
                                uint64_t                                       object,
                                size_t                                         location,
                                int32_t                                        messageCode,
                                const char *                                   pLayerPrefix,
                                const char *                                   pMessage,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT      flags,
                                VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
                                uint64_t                                       object,
                                size_t                                         location,
                                int32_t                                        messageCode,
                                const std::string &                            layerPrefix,
                                const std::string &                            message,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
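    // Usage sketch (illustrative only): VK_EXT_debug_report is the older debugging extension,
    // superseded by VK_EXT_debug_utils; `debugReportCallback` names a user-provided
    // PFN_vkDebugReportCallbackEXT and is an assumption of this sketch.
    //
    //   vk::DebugReportCallbackCreateInfoEXT createInfo(
    //     vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, debugReportCallback );
    //   vk::DebugReportCallbackEXT callback = instance.createDebugReportCallbackEXT( createInfo );
    //   ...
    //   instance.destroyDebugReportCallbackEXT( callback );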

#if defined( VK_USE_PLATFORM_GGP )
    //=== VK_GGP_stream_descriptor_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP(
      const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                  pAllocator,
      VULKAN_HPP_NAMESPACE::SurfaceKHR *                                 pSurface,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                        Optional<const AllocationCallbacks>          allocator
                                                                                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                              Optional<const AllocationCallbacks>          allocator
                                                                                           VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_GGP*/

#if defined( VK_USE_PLATFORM_VI_NN )
    //=== VK_NN_vi_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                            VULKAN_HPP_NAMESPACE::SurfaceKHR *                  pSurface,
                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createViSurfaceNN( const ViSurfaceCreateInfoNN &       createInfo,
                         Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createViSurfaceNNUnique( const ViSurfaceCreateInfoNN &       createInfo,
                               Optional<const AllocationCallbacks> allocator
                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_VI_NN*/

    //=== VK_KHR_device_group_creation ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR(
      uint32_t *                                            pPhysicalDeviceGroupCount,
      VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
              typename Dispatch                               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
      enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
    template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>,
              typename Dispatch                               = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
              typename B                                      = PhysicalDeviceGroupPropertiesAllocator,
              typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value,
                                      int>::type              = 0>
    VULKAN_HPP_NODISCARD
      typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
      enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_IOS_MVK )
    //=== VK_MVK_ios_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                              VULKAN_HPP_NAMESPACE::SurfaceKHR *                    pSurface,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK &     createInfo,
                           Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK &     createInfo,
                                 Optional<const AllocationCallbacks> allocator
                                                                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_IOS_MVK*/

#if defined( VK_USE_PLATFORM_MACOS_MVK )
    //=== VK_MVK_macos_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                                VULKAN_HPP_NAMESPACE::SurfaceKHR *                      pSurface,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK &   createInfo,
                             Optional<const AllocationCallbacks> allocator
                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK &   createInfo,
                                   Optional<const AllocationCallbacks> allocator
                                                                       VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_MACOS_MVK*/

    //=== VK_EXT_debug_utils ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT(
      const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
      VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT *                 pMessenger,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT(
      const DebugUtilsMessengerCreateInfoEXT & createInfo,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
      createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo,
                                          Optional<const AllocationCallbacks>      allocator
                                                                                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT      messenger,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroyDebugUtilsMessengerEXT(
      VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
      Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT      messenger,
                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                  Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT       messageSeverity,
                                     VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT              messageTypes,
                                     const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                     VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT        messageTypes,
                                     const DebugUtilsMessengerCallbackDataEXT &                 callbackData,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const
      VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
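    // Usage sketch (illustrative only): a debug-utils messenger routes validation and driver
    // messages to a user callback; `debugUtilsCallback` is a user-provided
    // PFN_vkDebugUtilsMessengerCallbackEXT and is an assumption of this sketch.
    //
    //   vk::DebugUtilsMessengerCreateInfoEXT createInfo(
    //     {},
    //     vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
    //     vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
    //     debugUtilsCallback );
    //   vk::DebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXT( createInfo );
    //   ...
    //   instance.destroyDebugUtilsMessengerEXT( messenger );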

#if defined( VK_USE_PLATFORM_FUCHSIA )
    //=== VK_FUCHSIA_imagepipe_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA(
      const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *               pAllocator,
      VULKAN_HPP_NAMESPACE::SurfaceKHR *                              pSurface,
      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
                                     Optional<const AllocationCallbacks>       allocator
                                                                               VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
                                           Optional<const AllocationCallbacks>       allocator
                                                                                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
    //=== VK_EXT_metal_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                                VULKAN_HPP_NAMESPACE::SurfaceKHR *                      pSurface,
                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT &   createInfo,
                             Optional<const AllocationCallbacks> allocator
                                                                 VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                             Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT &   createInfo,
                                   Optional<const AllocationCallbacks> allocator
                                                                       VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_METAL_EXT*/

    //=== VK_EXT_headless_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                                   VULKAN_HPP_NAMESPACE::SurfaceKHR *                         pSurface,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo,
                                Optional<const AllocationCallbacks>  allocator
                                                                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo,
                                      Optional<const AllocationCallbacks>  allocator
                                                                           VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
    //=== VK_EXT_directfb_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                                   VULKAN_HPP_NAMESPACE::SurfaceKHR *                         pSurface,
                                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo,
                                Optional<const AllocationCallbacks>  allocator
                                                                     VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo,
                                      Optional<const AllocationCallbacks>  allocator
                                                                           VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_DIRECTFB_EXT*/

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
    //=== VK_QNX_screen_surface ===

    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD Result
                         createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks *        pAllocator,
                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR *                       pSurface,
                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
      createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX &  createInfo,
                              Optional<const AllocationCallbacks> allocator
                                                                  VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    ifndef VULKAN_HPP_NO_SMART_HANDLE
    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
      typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
      createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX &  createInfo,
                                    Optional<const AllocationCallbacks> allocator
                                                                        VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_SCREEN_QNX*/

    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
    {
      return m_instance;
    }

    explicit operator bool() const VULKAN_HPP_NOEXCEPT
    {
      return m_instance != VK_NULL_HANDLE;
    }

    bool operator!() const VULKAN_HPP_NOEXCEPT
    {
      return m_instance == VK_NULL_HANDLE;
    }

  private:
    VkInstance m_instance = {};
  };
  static_assert( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ),
                 "handle and wrapper have different size!" );

  template <>
  struct VULKAN_HPP_DEPRECATED( "vk::cpp_type is deprecated. Use vk::CppType instead." ) cpp_type<ObjectType::eInstance>
  {
    using type = VULKAN_HPP_NAMESPACE::Instance;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eInstance>
  {
    using Type = VULKAN_HPP_NAMESPACE::Instance;
  };

  template <>
  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT,
                 VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance>
  {
    using Type = VULKAN_HPP_NAMESPACE::Instance;
  };

  template <>
  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Instance>
  {
    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  };

  //=== VK_VERSION_1_0 ===

#ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  class UniqueHandleTraits<Instance, Dispatch>
  {
  public:
    using deleter = ObjectDestroy<NoParent, Dispatch>;
  };
  using UniqueInstance = UniqueHandle<Instance, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/

  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo *  pCreateInfo,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                              VULKAN_HPP_NAMESPACE::Instance *                  pInstance,
                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
    VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type
    createInstance( const InstanceCreateInfo &          createInfo,
                    Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type
    createInstanceUnique( const InstanceCreateInfo &          createInfo,
                          Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
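  // Usage sketch (illustrative only): with exceptions enabled, createInstance throws vk::SystemError
  // on failure and returns the handle directly; createInstanceUnique wraps it in a UniqueHandle that
  // destroys the instance when it goes out of scope.
  //
  //   vk::ApplicationInfo appInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_1 );
  //   vk::InstanceCreateInfo createInfo( {}, &appInfo );
  //   vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );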

  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties(
    const char *                                pLayerName,
    uint32_t *                                  pPropertyCount,
    VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>,
            typename Dispatch                     = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
    enumerateInstanceExtensionProperties( Optional<const std::string> layerName
                                                                      VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
  template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>,
            typename Dispatch                     = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
            typename B                            = ExtensionPropertiesAllocator,
            typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
    enumerateInstanceExtensionProperties( Optional<const std::string>    layerName,
                                          ExtensionPropertiesAllocator & extensionPropertiesAllocator,
                                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
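  // Usage sketch (illustrative only): with the default (null) layer name this reports the extensions
  // exposed by the implementation and implicit layers; pass a layer name to query that layer instead.
  //
  //   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();
  //   for ( vk::ExtensionProperties const & ep : extensions )
  //   {
  //     if ( strcmp( ep.extensionName.data(), VK_KHR_SURFACE_EXTENSION_NAME ) == 0 ) { /* ... */ }
  //   }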

  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  VULKAN_HPP_NODISCARD Result
                       enumerateInstanceLayerProperties( uint32_t *                              pPropertyCount,
                                                         VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
                                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>,
            typename Dispatch                 = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
    enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
  template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>,
            typename Dispatch                 = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
            typename B                        = LayerPropertiesAllocator,
            typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
  VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
    enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator,
                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
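  // Usage sketch (illustrative only): layer enumeration mirrors extension enumeration; a common use
  // is checking for the Khronos validation layer before requesting it in InstanceCreateInfo.
  //
  //   auto layers = vk::enumerateInstanceLayerProperties();
  //   for ( vk::LayerProperties const & lp : layers )
  //   {
  //     if ( strcmp( lp.layerName.data(), "VK_LAYER_KHRONOS_validation" ) == 0 ) { /* enable it */ }
  //   }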

  //=== VK_VERSION_1_1 ===

  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  VULKAN_HPP_NODISCARD Result enumerateInstanceVersion(
    uint32_t * pApiVersion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  typename ResultValueType<uint32_t>::type
    enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
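  // Usage sketch (illustrative only): the returned value packs major/minor/patch and can be decoded
  // with the VK_VERSION_* macros from vulkan_core.h.
  //
  //   uint32_t apiVersion = vk::enumerateInstanceVersion();
  //   uint32_t major = VK_VERSION_MAJOR( apiVersion );
  //   uint32_t minor = VK_VERSION_MINOR( apiVersion );  // e.g. a 1.2 loader yields major 1, minor 2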

  //===========================
  //=== COMMAND Definitions ===
  //===========================

  //=== VK_VERSION_1_0 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo *  pCreateInfo,
                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                         VULKAN_HPP_NAMESPACE::Instance *                  pInstance,
                                                         Dispatch const &                                  d ) VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ),
                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                    reinterpret_cast<VkInstance *>( pInstance ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type
    createInstance( const InstanceCreateInfo &          createInfo,
                    Optional<const AllocationCallbacks> allocator,
                    Dispatch const &                    d )
  {
    VULKAN_HPP_NAMESPACE::Instance instance;
    Result                         result = static_cast<Result>(
      d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkInstance *>( &instance ) ) );
    return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type
    createInstanceUnique( const InstanceCreateInfo &          createInfo,
                          Optional<const AllocationCallbacks> allocator,
                          Dispatch const &                    d )
  {
    VULKAN_HPP_NAMESPACE::Instance instance;
    Result                         result = static_cast<Result>(
      d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkInstance *>( &instance ) ) );
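    // The deleter captures the allocation callbacks and the dispatcher so that the returned
    // UniqueHandle can call vkDestroyInstance with the same callbacks when it goes out of scope.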
    ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Instance, Dispatch>(
      result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator,
                                            Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyInstance( m_instance,
                         reinterpret_cast<const VkAllocationCallbacks *>(
                           static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::enumeratePhysicalDevices( uint32_t *                             pPhysicalDeviceCount,
                                                                             VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
                                                                             Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumeratePhysicalDevices(
      m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
    Instance::enumeratePhysicalDevices( Dispatch const & d ) const
  {
    std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
    uint32_t                                             physicalDeviceCount;
    Result                                               result;
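    // Standard two-call enumeration idiom: query the count, size the vector, then fetch; loop while
    // the implementation reports VK_INCOMPLETE, since the number of physical devices may change
    // between the two calls.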
    do
    {
      result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
      {
        physicalDevices.resize( physicalDeviceCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
          m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
        VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
    {
      physicalDevices.resize( physicalDeviceCount );
    }
    return createResultValue(
      result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
  }

  template <typename PhysicalDeviceAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type
    Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
  {
    std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
    uint32_t                                             physicalDeviceCount;
    Result                                               result;
    do
    {
      result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
      {
        physicalDevices.resize( physicalDeviceCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDevices(
          m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
        VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
    {
      physicalDevices.resize( physicalDeviceCount );
    }
    return createResultValue(
      result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
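
  // The enhanced overloads above repeat the query while the driver reports
  // Result::eIncomplete, so a caller gets the complete list in a single call.
  // Illustrative sketch, given a vk::Instance instance and the same default
  // configuration as above:
  //
  //   std::vector<vk::PhysicalDevice> gpus = instance.enumeratePhysicalDevices();
  //   vk::PhysicalDevice gpu = gpus.front();  // trivial selection, for illustration only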

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
                                         PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
    return features;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format             format,
                                         VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
                                         Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFormatProperties(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
                                         PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
                                         Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
    d.vkGetPhysicalDeviceFormatProperties(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
    return formatProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format                  format,
                                              VULKAN_HPP_NAMESPACE::ImageType               type,
                                              VULKAN_HPP_NAMESPACE::ImageTiling             tiling,
                                              VULKAN_HPP_NAMESPACE::ImageUsageFlags         usage,
                                              VULKAN_HPP_NAMESPACE::ImageCreateFlags        flags,
                                              VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkImageTiling>( tiling ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageCreateFlags>( flags ),
      reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
    PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format           format,
                                              VULKAN_HPP_NAMESPACE::ImageType        type,
                                              VULKAN_HPP_NAMESPACE::ImageTiling      tiling,
                                              VULKAN_HPP_NAMESPACE::ImageUsageFlags  usage,
                                              VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
                                              Dispatch const &                       d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkImageTiling>( tiling ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageCreateFlags>( flags ),
      reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) );
    return createResultValue(
      result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
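
  // Capability query sketch for the enhanced overload above (illustrative; with
  // exceptions enabled an unsupported format/usage combination throws instead of
  // returning an error code, so a try/catch may be appropriate):
  //
  //   vk::ImageFormatProperties ifp = gpu.getImageFormatProperties( vk::Format::eR8G8B8A8Unorm,
  //                                                                 vk::ImageType::e2D,
  //                                                                 vk::ImageTiling::eOptimal,
  //                                                                 vk::ImageUsageFlagBits::eSampled,
  //                                                                 {} );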

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
                                         PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
    return properties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
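
  // getFeatures (above) and getProperties are plain struct queries; a minimal sketch:
  //
  //   vk::PhysicalDeviceFeatures   features = gpu.getFeatures();
  //   vk::PhysicalDeviceProperties props    = gpu.getProperties();
  //   bool suitable = features.samplerAnisotropy && ( props.limits.maxImageDimension2D >= 4096 );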

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getQueueFamilyProperties( uint32_t *                                    pQueueFamilyPropertyCount,
                                              VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice,
                                                pQueueFamilyPropertyCount,
                                                reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
                                         PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
  {
    std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
    uint32_t                                                           queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    return queueFamilyProperties;
  }

  template <typename QueueFamilyPropertiesAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator>
                                         PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator,
                                              Dispatch const &                 d ) const
  {
    std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties(
      queueFamilyPropertiesAllocator );
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    return queueFamilyProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
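
  // Typical use of the getQueueFamilyProperties overloads above: scanning for a
  // family with the required capabilities. Illustrative sketch:
  //
  //   std::vector<vk::QueueFamilyProperties> families = gpu.getQueueFamilyProperties();
  //   uint32_t graphicsFamily = 0;
  //   for ( uint32_t i = 0; i < families.size(); ++i )
  //   {
  //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       graphicsFamily = i;
  //       break;
  //     }
  //   }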

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
                                           reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
                                         PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice,
                                           reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
    return memoryProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char *     pName,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetInstanceProcAddr( m_instance, pName );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name,
                                                              Dispatch const &    d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char *     pName,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceProcAddr( m_device, pName );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name,
                                                            Dispatch const &    d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceProcAddr( m_device, name.c_str() );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
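
  // getProcAddr returns a PFN_vkVoidFunction that must be cast to the correct
  // prototype; instance-level entry points come from Instance::getProcAddr,
  // device-level ones from Device::getProcAddr. Illustrative sketch (the debug
  // utils entry point is just an example of an extension function):
  //
  //   auto pfnCreateDebugUtilsMessenger = reinterpret_cast<PFN_vkCreateDebugUtilsMessengerEXT>(
  //     instance.getProcAddr( "vkCreateDebugUtilsMessengerEXT" ) );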

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo *    pCreateInfo,
                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                  VULKAN_HPP_NAMESPACE::Device *                    pDevice,
                                  Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
                                                  reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                  reinterpret_cast<VkDevice *>( pDevice ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type
                                          PhysicalDevice::createDevice( const DeviceCreateInfo &            createInfo,
                                  Optional<const AllocationCallbacks> allocator,
                                  Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Device device;
    Result                       result = static_cast<Result>(
      d.vkCreateDevice( m_physicalDevice,
                        reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>(
                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkDevice *>( &device ) ) );
    return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
    PhysicalDevice::createDeviceUnique( const DeviceCreateInfo &            createInfo,
                                        Optional<const AllocationCallbacks> allocator,
                                        Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Device device;
    Result                       result = static_cast<Result>(
      d.vkCreateDevice( m_physicalDevice,
                        reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>(
                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkDevice *>( &device ) ) );
    ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Device, Dispatch>(
      result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
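
  // Device creation sketch for the overloads above (illustrative; assumes
  // graphicsFamily was selected as in the queue-family sketch earlier):
  //
  //   float queuePriority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueInfo( {}, graphicsFamily, 1, &queuePriority );
  //   vk::DeviceCreateInfo      deviceInfo( {}, 1, &queueInfo );
  //   vk::Device                device = gpu.createDevice( deviceInfo );
  //   // or: vk::UniqueDevice device = gpu.createDeviceUnique( deviceInfo );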

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDevice( m_device,
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         enumerateInstanceExtensionProperties( const char *                                pLayerName,
                                                                               uint32_t *                                  pPropertyCount,
                                                                               VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
                                                                               Dispatch const &                            d ) VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties(
      pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename ExtensionPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
    enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
  {
    std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
    uint32_t                                                       propertyCount;
    Result                                                         result;
    do
    {
      result = static_cast<Result>(
        d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>(
          d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr,
                                                    &propertyCount,
                                                    reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
  }

  template <typename ExtensionPropertiesAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
    enumerateInstanceExtensionProperties( Optional<const std::string>    layerName,
                                          ExtensionPropertiesAllocator & extensionPropertiesAllocator,
                                          Dispatch const &               d )
  {
    std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
    uint32_t                                                       propertyCount;
    Result                                                         result;
    do
    {
      result = static_cast<Result>(
        d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>(
          d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr,
                                                    &propertyCount,
                                                    reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
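
  // The enumerateInstanceExtensionProperties overloads above take an optional
  // layer name; passing nothing queries the implementation and implicit layers.
  // Illustrative availability check:
  //
  //   bool surfaceSupported = false;
  //   for ( vk::ExtensionProperties const & ep : vk::enumerateInstanceExtensionProperties() )
  //   {
  //     if ( strcmp( ep.extensionName.data(), VK_KHR_SURFACE_EXTENSION_NAME ) == 0 )
  //     {
  //       surfaceSupported = true;
  //     }
  //   }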

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::enumerateDeviceExtensionProperties( const char *                                pLayerName,
                                                        uint32_t *                                  pPropertyCount,
                                                        VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
      m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename ExtensionPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
    PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
                                                        Dispatch const &            d ) const
  {
    std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
    uint32_t                                                       propertyCount;
    Result                                                         result;
    do
    {
      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
        m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>(
          d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
                                                  layerName ? layerName->c_str() : nullptr,
                                                  &propertyCount,
                                                  reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
  }

  template <typename ExtensionPropertiesAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type
    PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string>    layerName,
                                                        ExtensionPropertiesAllocator & extensionPropertiesAllocator,
                                                        Dispatch const &               d ) const
  {
    std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
    uint32_t                                                       propertyCount;
    Result                                                         result;
    do
    {
      result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties(
        m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>(
          d.vkEnumerateDeviceExtensionProperties( m_physicalDevice,
                                                  layerName ? layerName->c_str() : nullptr,
                                                  &propertyCount,
                                                  reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         enumerateInstanceLayerProperties( uint32_t *                              pPropertyCount,
                                                                           VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
                                                                           Dispatch const &                        d ) VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename LayerPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
    enumerateInstanceLayerProperties( Dispatch const & d )
  {
    std::vector<LayerProperties, LayerPropertiesAllocator> properties;
    uint32_t                                               propertyCount;
    Result                                                 result;
    do
    {
      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties(
          &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
  }

  template <typename LayerPropertiesAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
    enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
  {
    std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
    uint32_t                                               propertyCount;
    Result                                                 result;
    do
    {
      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties(
          &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
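
  // The same pattern works for layers, e.g. probing for the Khronos validation
  // layer (illustrative; the layer name depends on what is installed on the system):
  //
  //   bool validationAvailable = false;
  //   for ( vk::LayerProperties const & lp : vk::enumerateInstanceLayerProperties() )
  //   {
  //     if ( strcmp( lp.layerName.data(), "VK_LAYER_KHRONOS_validation" ) == 0 )
  //     {
  //       validationAvailable = true;
  //     }
  //   }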

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::enumerateDeviceLayerProperties( uint32_t *                              pPropertyCount,
                                                    VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
      m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename LayerPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
    PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
  {
    std::vector<LayerProperties, LayerPropertiesAllocator> properties;
    uint32_t                                               propertyCount;
    Result                                                 result;
    do
    {
      result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
  }

  template <typename LayerPropertiesAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type
    PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator,
                                                    Dispatch const &           d ) const
  {
    std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
    uint32_t                                               propertyCount;
    Result                                                 result;
    do
    {
      result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getQueue( uint32_t                      queueFamilyIndex,
                                           uint32_t                      queueIndex,
                                           VULKAN_HPP_NAMESPACE::Queue * pQueue,
                                           Dispatch const &              d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
                                         Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::Queue queue;
    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
    return queue;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t                                 submitCount,
                                                               const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
                                                               VULKAN_HPP_NAMESPACE::Fence              fence,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkQueueSubmit(
      m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
                   VULKAN_HPP_NAMESPACE::Fence                                fence,
                   Dispatch const &                                           d ) const
  {
    Result result = static_cast<Result>( d.vkQueueSubmit( m_queue,
                                                          submits.size(),
                                                          reinterpret_cast<const VkSubmitInfo *>( submits.data() ),
                                                          static_cast<VkFence>( fence ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
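
  // Queue retrieval and submission sketch (illustrative; assumes commandBuffer and
  // fence were created elsewhere and that queue index 0 exists in graphicsFamily):
  //
  //   vk::Queue queue = device.getQueue( graphicsFamily, 0 );
  //   vk::SubmitInfo submitInfo( {}, {}, {}, 1, &commandBuffer );
  //   queue.submit( submitInfo, fence );
  //   queue.waitIdle();  // or wait on the fence for finer-grained synchronization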

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Queue::waitIdle( Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::waitIdle( Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo *  pAllocateInfo,
                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                            VULKAN_HPP_NAMESPACE::DeviceMemory *              pMemory,
                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkAllocateMemory( m_device,
                                                    reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                    reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
    Device::allocateMemory( const MemoryAllocateInfo &          allocateInfo,
                            Optional<const AllocationCallbacks> allocator,
                            Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
    Result                             result = static_cast<Result>(
      d.vkAllocateMemory( m_device,
                          reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
    return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
    Device::allocateMemoryUnique( const MemoryAllocateInfo &          allocateInfo,
                                  Optional<const AllocationCallbacks> allocator,
                                  Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
    Result                             result = static_cast<Result>(
      d.vkAllocateMemory( m_device,
                          reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
    ObjectFree<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>(
      result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
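
  // Allocation sketch for the overloads above (illustrative; memoryTypeIndex has
  // to be chosen against the device's memory properties, see the combined sketch
  // after getBufferMemoryRequirements further below):
  //
  //   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
  //   vk::DeviceMemory       memory = device.allocateMemory( allocInfo );
  //   // or, letting the ObjectFree deleter call vkFreeMemory automatically:
  //   // vk::UniqueDeviceMemory uniqueMemory = device.allocateMemoryUnique( allocInfo );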

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory                memory,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkFreeMemory(
      m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory  memory,
                                             Optional<const AllocationCallbacks> allocator,
                                             Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkFreeMemory( m_device,
                    static_cast<VkDeviceMemory>( memory ),
                    reinterpret_cast<const VkAllocationCallbacks *>(
                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory                memory,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                       Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkFreeMemory(
      m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory  memory,
                                       Optional<const AllocationCallbacks> allocator,
                                       Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkFreeMemory( m_device,
                    static_cast<VkDeviceMemory>( memory ),
                    reinterpret_cast<const VkAllocationCallbacks *>(
                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory   memory,
                                                                   VULKAN_HPP_NAMESPACE::DeviceSize     offset,
                                                                   VULKAN_HPP_NAMESPACE::DeviceSize     size,
                                                                   VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
                                                                   void **                              ppData,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkMapMemory( m_device,
                                               static_cast<VkDeviceMemory>( memory ),
                                               static_cast<VkDeviceSize>( offset ),
                                               static_cast<VkDeviceSize>( size ),
                                               static_cast<VkMemoryMapFlags>( flags ),
                                               ppData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void *>::type
                                          Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory   memory,
                       VULKAN_HPP_NAMESPACE::DeviceSize     offset,
                       VULKAN_HPP_NAMESPACE::DeviceSize     size,
                       VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
                       Dispatch const &                     d ) const
  {
    void * pData;
    Result result = static_cast<Result>( d.vkMapMemory( m_device,
                                                        static_cast<VkDeviceMemory>( memory ),
                                                        static_cast<VkDeviceSize>( offset ),
                                                        static_cast<VkDeviceSize>( size ),
                                                        static_cast<VkMemoryMapFlags>( flags ),
                                                        &pData ) );
    return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
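
  // Map / copy / unmap sketch (illustrative; assumes the memory was allocated from
  // a host-visible type; non-coherent memory also needs a flush, see below):
  //
  //   void * mapped = device.mapMemory( memory, 0, bufferSize );
  //   memcpy( mapped, srcData, static_cast<size_t>( bufferSize ) );
  //   device.unmapMemory( memory );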

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                              Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::flushMappedMemoryRanges( uint32_t                                        memoryRangeCount,
                                     const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
                                     Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkFlushMappedMemoryRanges(
      m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
                                     Dispatch const &                                                  d ) const
  {
    Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges(
      m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::invalidateMappedMemoryRanges( uint32_t                                        memoryRangeCount,
                                          const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
                                          Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkInvalidateMappedMemoryRanges(
      m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::invalidateMappedMemoryRanges(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges(
      m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
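
  // For memory types without eHostCoherent, host writes have to be flushed and
  // device writes invalidated before the other side can see them. Illustrative sketch:
  //
  //   vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
  //   device.flushMappedMemoryRanges( range );       // after writing through the mapping
  //   device.invalidateMappedMemoryRanges( range );  // before reading data written by the device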

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
                                                      Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDeviceMemoryCommitment(
      m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
                                         Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                 Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
    d.vkGetDeviceMemoryCommitment(
      m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
    return committedMemoryInBytes;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer       buffer,
                                                                          VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                                          VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindBufferMemory( m_device,
                                                      static_cast<VkBuffer>( buffer ),
                                                      static_cast<VkDeviceMemory>( memory ),
                                                      static_cast<VkDeviceSize>( memoryOffset ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
                              VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                              VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
                              Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkBindBufferMemory( m_device,
                                                               static_cast<VkBuffer>( buffer ),
                                                               static_cast<VkDeviceMemory>( memory ),
                                                               static_cast<VkDeviceSize>( memoryOffset ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image        image,
                                                                         VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                                                                         VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindImageMemory( m_device,
                                                     static_cast<VkImage>( image ),
                                                     static_cast<VkDeviceMemory>( memory ),
                                                     static_cast<VkDeviceSize>( memoryOffset ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
                             VULKAN_HPP_NAMESPACE::DeviceMemory memory,
                             VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
                             Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkBindImageMemory( m_device,
                                                              static_cast<VkImage>( image ),
                                                              static_cast<VkDeviceMemory>( memory ),
                                                              static_cast<VkDeviceSize>( memoryOffset ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer               buffer,
                                         VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
                                         Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetBufferMemoryRequirements(
      m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
                                         Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                         Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
    d.vkGetBufferMemoryRequirements(
      m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
    return memoryRequirements;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
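
  // Putting the memory functions above together: query the buffer's requirements,
  // pick a compatible host-visible memory type from getMemoryProperties(), then
  // allocate and bind. Illustrative sketch only; assumes an existing vk::Buffer:
  //
  //   vk::MemoryRequirements             req      = device.getBufferMemoryRequirements( buffer );
  //   vk::PhysicalDeviceMemoryProperties memProps = gpu.getMemoryProperties();
  //   uint32_t typeIndex = 0;
  //   for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
  //   {
  //     if ( ( req.memoryTypeBits & ( 1u << i ) ) &&
  //          ( memProps.memoryTypes[i].propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible ) )
  //     {
  //       typeIndex = i;
  //       break;
  //     }
  //   }
  //   vk::DeviceMemory memory = device.allocateMemory( vk::MemoryAllocateInfo( req.size, typeIndex ) );
  //   device.bindBufferMemory( buffer, memory, 0 );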

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image                image,
                                        VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
                                        Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageMemoryRequirements(
      m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
                                         Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
                                        Dispatch const &            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
    d.vkGetImageMemoryRequirements(
      m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
    return memoryRequirements;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements(
    VULKAN_HPP_NAMESPACE::Image                           image,
    uint32_t *                                            pSparseMemoryRequirementCount,
    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
    Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageSparseMemoryRequirements(
      m_device,
      static_cast<VkImage>( image ),
      pSparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
                      Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
  {
    std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
    uint32_t                                                                           sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements(
      m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements(
      m_device,
      static_cast<VkImage>( image ),
      &sparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    return sparseMemoryRequirements;
  }

  template <
    typename SparseImageMemoryRequirementsAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
                      Device::getImageSparseMemoryRequirements(
      VULKAN_HPP_NAMESPACE::Image              image,
      SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
      Dispatch const &                         d ) const
  {
    std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
      sparseImageMemoryRequirementsAllocator );
    uint32_t sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements(
      m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements(
      m_device,
      static_cast<VkImage>( image ),
      &sparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    return sparseMemoryRequirements;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
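
  // Usage sketch (illustrative; assumes a valid `device` and `image`): the enhanced overloads wrap the
  // query-count-then-fill idiom above and return the filled vector directly:
  //   std::vector<vk::SparseImageMemoryRequirements> reqs = device.getImageSparseMemoryRequirements( image );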

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format                        format,
                                                    VULKAN_HPP_NAMESPACE::ImageType                     type,
                                                    VULKAN_HPP_NAMESPACE::SampleCountFlagBits           samples,
                                                    VULKAN_HPP_NAMESPACE::ImageUsageFlags               usage,
                                                    VULKAN_HPP_NAMESPACE::ImageTiling                   tiling,
                                                    uint32_t *                                          pPropertyCount,
                                                    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceSparseImageFormatProperties(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkSampleCountFlagBits>( samples ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageTiling>( tiling ),
      pPropertyCount,
      reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
                                         PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format              format,
                                                    VULKAN_HPP_NAMESPACE::ImageType           type,
                                                    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
                                                    VULKAN_HPP_NAMESPACE::ImageUsageFlags     usage,
                                                    VULKAN_HPP_NAMESPACE::ImageTiling         tiling,
                                                    Dispatch const &                          d ) const
  {
    std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
    uint32_t                                                                       propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
                                                      static_cast<VkFormat>( format ),
                                                      static_cast<VkImageType>( type ),
                                                      static_cast<VkSampleCountFlagBits>( samples ),
                                                      static_cast<VkImageUsageFlags>( usage ),
                                                      static_cast<VkImageTiling>( tiling ),
                                                      &propertyCount,
                                                      nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkSampleCountFlagBits>( samples ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageTiling>( tiling ),
      &propertyCount,
      reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    return properties;
  }

  template <
    typename SparseImageFormatPropertiesAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
                                         PhysicalDevice::getSparseImageFormatProperties(
      VULKAN_HPP_NAMESPACE::Format              format,
      VULKAN_HPP_NAMESPACE::ImageType           type,
      VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags     usage,
      VULKAN_HPP_NAMESPACE::ImageTiling         tiling,
      SparseImageFormatPropertiesAllocator &    sparseImageFormatPropertiesAllocator,
      Dispatch const &                          d ) const
  {
    std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties(
      sparseImageFormatPropertiesAllocator );
    uint32_t propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
                                                      static_cast<VkFormat>( format ),
                                                      static_cast<VkImageType>( type ),
                                                      static_cast<VkSampleCountFlagBits>( samples ),
                                                      static_cast<VkImageUsageFlags>( usage ),
                                                      static_cast<VkImageTiling>( tiling ),
                                                      &propertyCount,
                                                      nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkSampleCountFlagBits>( samples ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageTiling>( tiling ),
      &propertyCount,
      reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    return properties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
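
  // Usage sketch (illustrative): the second overload above seeds the returned vector with a caller-supplied
  // allocator instance; `myAllocator` is a placeholder for any allocator whose value_type is
  // vk::SparseImageFormatProperties:
  //   auto props = physicalDevice.getSparseImageFormatProperties( format, type, samples, usage, tiling, myAllocator );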

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Queue::bindSparse( uint32_t                                     bindInfoCount,
                       const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
                       VULKAN_HPP_NAMESPACE::Fence                  fence,
                       Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkQueueBindSparse( m_queue,
                                                     bindInfoCount,
                                                     reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ),
                                                     static_cast<VkFence>( fence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
                       VULKAN_HPP_NAMESPACE::Fence                                    fence,
                       Dispatch const &                                               d ) const
  {
    Result result =
      static_cast<Result>( d.vkQueueBindSparse( m_queue,
                                                bindInfo.size(),
                                                reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ),
                                                static_cast<VkFence>( fence ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
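
  // Usage sketch (illustrative; assumes exceptions are enabled and a valid `queue`): ArrayProxy accepts a single
  // vk::BindSparseInfo or a container of them, and the enhanced overload throws on failure instead of returning
  // a Result:
  //   queue.bindSparse( bindSparseInfo, fence );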

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo *     pCreateInfo,
                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                         VULKAN_HPP_NAMESPACE::Fence *                     pFence,
                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateFence( m_device,
                                                 reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                 reinterpret_cast<VkFence *>( pFence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
                                          Device::createFence( const FenceCreateInfo &             createInfo,
                         Optional<const AllocationCallbacks> allocator,
                         Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Fence fence;
    Result                      result = static_cast<Result>(
      d.vkCreateFence( m_device,
                       reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkFence *>( &fence ) ) );
    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
    Device::createFenceUnique( const FenceCreateInfo &             createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Fence fence;
    Result                      result = static_cast<Result>(
      d.vkCreateFence( m_device,
                       reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkFence *>( &fence ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
      result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
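
  // Usage sketch (illustrative; assumes the default `vk` namespace and exceptions enabled):
  //   vk::Fence       fence       = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
  //   vk::UniqueFence uniqueFence = device.createFenceUnique( {} );  // freed automatically via ObjectDestroy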

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence                       fence,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFence(
      m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence         fence,
                                               Optional<const AllocationCallbacks> allocator,
                                               Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFence( m_device,
                      static_cast<VkFence>( fence ),
                      reinterpret_cast<const VkAllocationCallbacks *>(
                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence                       fence,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFence(
      m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence         fence,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFence( m_device,
                      static_cast<VkFence>( fence ),
                      reinterpret_cast<const VkAllocationCallbacks *>(
                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
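
  // Note: each handle type gets both a named destroy overload (destroyFence above) and a generic Device::destroy
  // overload; the ObjectDestroy deleter used by UniqueHandle routes through the generic overload, so unique and
  // plain handles share one destruction path.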

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t                            fenceCount,
                                                                     const VULKAN_HPP_NAMESPACE::Fence * pFences,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence,
                                                                        Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
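
  // Note: eSuccess and eNotReady are both success codes for getFenceStatus, so the enhanced overload returns the
  // Result instead of throwing for an unsignaled fence:
  //   bool signaled = ( device.getFenceStatus( fence ) == vk::Result::eSuccess );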

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t                            fenceCount,
                                                                       const VULKAN_HPP_NAMESPACE::Fence * pFences,
                                                                       VULKAN_HPP_NAMESPACE::Bool32        waitAll,
                                                                       uint64_t                            timeout,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkWaitForFences(
      m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
                           VULKAN_HPP_NAMESPACE::Bool32                          waitAll,
                           uint64_t                                              timeout,
                           Dispatch const &                                      d ) const
  {
    Result result = static_cast<Result>( d.vkWaitForFences( m_device,
                                                            fences.size(),
                                                            reinterpret_cast<const VkFence *>( fences.data() ),
                                                            static_cast<VkBool32>( waitAll ),
                                                            timeout ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
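
  // Usage sketch (illustrative): waiting on a single fence with an effectively unbounded timeout; eTimeout is a
  // success code here and is reported through the returned Result rather than an exception:
  //   vk::Result r = device.waitForFences( fence, VK_TRUE, UINT64_MAX );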

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                             VULKAN_HPP_NAMESPACE::Semaphore *                 pSemaphore,
                             Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateSemaphore( m_device,
                                                     reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
    Device::createSemaphore( const SemaphoreCreateInfo &         createInfo,
                             Optional<const AllocationCallbacks> allocator,
                             Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
    Result                          result = static_cast<Result>(
      d.vkCreateSemaphore( m_device,
                           reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
    return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
    Device::createSemaphoreUnique( const SemaphoreCreateInfo &         createInfo,
                                   Optional<const AllocationCallbacks> allocator,
                                   Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
    Result                          result = static_cast<Result>(
      d.vkCreateSemaphore( m_device,
                           reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>(
      result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore                   semaphore,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySemaphore(
      m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore     semaphore,
                                                   Optional<const AllocationCallbacks> allocator,
                                                   Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySemaphore( m_device,
                          static_cast<VkSemaphore>( semaphore ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore                   semaphore,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySemaphore(
      m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore     semaphore,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySemaphore( m_device,
                          static_cast<VkSemaphore>( semaphore ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo *     pCreateInfo,
                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                         VULKAN_HPP_NAMESPACE::Event *                     pEvent,
                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateEvent( m_device,
                                                 reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                 reinterpret_cast<VkEvent *>( pEvent ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type
                                          Device::createEvent( const EventCreateInfo &             createInfo,
                         Optional<const AllocationCallbacks> allocator,
                         Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Event event;
    Result                      result = static_cast<Result>(
      d.vkCreateEvent( m_device,
                       reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkEvent *>( &event ) ) );
    return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type
    Device::createEventUnique( const EventCreateInfo &             createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Event event;
    Result                      result = static_cast<Result>(
      d.vkCreateEvent( m_device,
                       reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkEvent *>( &event ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Event, Dispatch>(
      result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event                       event,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyEvent(
      m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event         event,
                                               Optional<const AllocationCallbacks> allocator,
                                               Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyEvent( m_device,
                      static_cast<VkEvent>( event ),
                      reinterpret_cast<const VkAllocationCallbacks *>(
                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event                       event,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyEvent(
      m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event         event,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyEvent( m_device,
                      static_cast<VkEvent>( event ),
                      reinterpret_cast<const VkAllocationCallbacks *>(
                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event,
                                                                        Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
                              { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                                             Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
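
  // Usage sketch (illustrative; assumes a valid `event`): host-side event signaling with the enhanced overloads;
  // getEventStatus reports the state through its Result, with eEventSet and eEventReset both treated as success:
  //   device.setEvent( event );
  //   bool set = ( device.getEventStatus( event ) == vk::Result::eEventSet );
  //   device.resetEvent( event );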

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                             VULKAN_HPP_NAMESPACE::QueryPool *                 pQueryPool,
                             Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateQueryPool( m_device,
                                                     reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
    Device::createQueryPool( const QueryPoolCreateInfo &         createInfo,
                             Optional<const AllocationCallbacks> allocator,
                             Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
    Result                          result = static_cast<Result>(
      d.vkCreateQueryPool( m_device,
                           reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
    return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
    Device::createQueryPoolUnique( const QueryPoolCreateInfo &         createInfo,
                                   Optional<const AllocationCallbacks> allocator,
                                   Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
    Result                          result = static_cast<Result>(
      d.vkCreateQueryPool( m_device,
                           reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>(
      result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool                   queryPool,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyQueryPool(
      m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool     queryPool,
                                                   Optional<const AllocationCallbacks> allocator,
                                                   Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyQueryPool( m_device,
                          static_cast<VkQueryPool>( queryPool ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool                   queryPool,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyQueryPool(
      m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool     queryPool,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyQueryPool( m_device,
                          static_cast<VkQueryPool>( queryPool ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                 uint32_t                               firstQuery,
                                 uint32_t                               queryCount,
                                 size_t                                 dataSize,
                                 void *                                 pData,
                                 VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                 VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                 Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
                                                         static_cast<VkQueryPool>( queryPool ),
                                                         firstQuery,
                                                         queryCount,
                                                         dataSize,
                                                         pData,
                                                         static_cast<VkDeviceSize>( stride ),
                                                         static_cast<VkQueryResultFlags>( flags ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other overloads instead." )
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                                          uint32_t                               firstQuery,
                                                          uint32_t                               queryCount,
                                                          ArrayProxy<T> const &                  data,
                                                          VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                                          VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                          Dispatch const &                       d ) const
  {
    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
                                                                  static_cast<VkQueryPool>( queryPool ),
                                                                  firstQuery,
                                                                  queryCount,
                                                                  data.size() * sizeof( T ),
                                                                  reinterpret_cast<void *>( data.data() ),
                                                                  static_cast<VkDeviceSize>( stride ),
                                                                  static_cast<VkQueryResultFlags>( flags ) ) );
    return createResultValue(
      result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
  }

  template <typename T, typename Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<T, Allocator>>
                                         Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                 uint32_t                               firstQuery,
                                 uint32_t                               queryCount,
                                 size_t                                 dataSize,
                                 VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                 VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                 Dispatch const &                       d ) const
  {
    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
    Result                    result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
                                                                  static_cast<VkQueryPool>( queryPool ),
                                                                  firstQuery,
                                                                  queryCount,
                                                                  data.size() * sizeof( T ),
                                                                  reinterpret_cast<void *>( data.data() ),
                                                                  static_cast<VkDeviceSize>( stride ),
                                                                  static_cast<VkQueryResultFlags>( flags ) ) );
    return createResultValue( result,
                              data,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  }

  template <typename T, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<T>
                                         Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                uint32_t                               firstQuery,
                                uint32_t                               queryCount,
                                VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                Dispatch const &                       d ) const
  {
    T      data;
    Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device,
                                                                  static_cast<VkQueryPool>( queryPool ),
                                                                  firstQuery,
                                                                  queryCount,
                                                                  sizeof( T ),
                                                                  reinterpret_cast<void *>( &data ),
                                                                  static_cast<VkDeviceSize>( stride ),
                                                                  static_cast<VkQueryResultFlags>( flags ) ) );
    return createResultValue( result,
                              data,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
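
  // Usage sketch (illustrative; `queryPool` and `count` are placeholders): fetching 64-bit results for `count`
  // queries into a vector; ResultValue::result distinguishes eSuccess from eNotReady when
  // vk::QueryResultFlagBits::eWait is not set:
  //   auto rv = device.getQueryPoolResults<uint64_t>(
  //     queryPool, 0, count, count * sizeof( uint64_t ), sizeof( uint64_t ), vk::QueryResultFlagBits::e64 );
  //   if ( rv.result == vk::Result::eSuccess ) { /* rv.value holds the std::vector<uint64_t> of results */ }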

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo *    pCreateInfo,
                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                          VULKAN_HPP_NAMESPACE::Buffer *                    pBuffer,
                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateBuffer( m_device,
                                                  reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                  reinterpret_cast<VkBuffer *>( pBuffer ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type
                                          Device::createBuffer( const BufferCreateInfo &            createInfo,
                          Optional<const AllocationCallbacks> allocator,
                          Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Buffer buffer;
    Result                       result = static_cast<Result>(
      d.vkCreateBuffer( m_device,
                        reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>(
                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkBuffer *>( &buffer ) ) );
    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type
    Device::createBufferUnique( const BufferCreateInfo &            createInfo,
                                Optional<const AllocationCallbacks> allocator,
                                Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Buffer buffer;
    Result                       result = static_cast<Result>(
      d.vkCreateBuffer( m_device,
                        reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>(
                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkBuffer *>( &buffer ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>(
      result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyBuffer(
      m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer        buffer,
                                                Optional<const AllocationCallbacks> allocator,
                                                Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyBuffer( m_device,
                       static_cast<VkBuffer>( buffer ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyBuffer(
      m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer        buffer,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyBuffer( m_device,
                       static_cast<VkBuffer>( buffer ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks *  pAllocator,
                              VULKAN_HPP_NAMESPACE::BufferView *                 pView,
                              Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateBufferView( m_device,
                                                      reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkBufferView *>( pView ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
    Device::createBufferView( const BufferViewCreateInfo &        createInfo,
                              Optional<const AllocationCallbacks> allocator,
                              Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::BufferView view;
    Result                           result = static_cast<Result>(
      d.vkCreateBufferView( m_device,
                            reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkBufferView *>( &view ) ) );
    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
    Device::createBufferViewUnique( const BufferViewCreateInfo &        createInfo,
                                    Optional<const AllocationCallbacks> allocator,
                                    Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::BufferView view;
    Result                           result = static_cast<Result>(
      d.vkCreateBufferView( m_device,
                            reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkBufferView *>( &view ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>(
      result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyBufferView( m_device,
                           static_cast<VkBufferView>( bufferView ),
                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView    bufferView,
                                                    Optional<const AllocationCallbacks> allocator,
                                                    Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyBufferView( m_device,
                           static_cast<VkBufferView>( bufferView ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyBufferView( m_device,
                           static_cast<VkBufferView>( bufferView ),
                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView    bufferView,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyBufferView( m_device,
                           static_cast<VkBufferView>( bufferView ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo *     pCreateInfo,
                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                         VULKAN_HPP_NAMESPACE::Image *                     pImage,
                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateImage( m_device,
                                                 reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                 reinterpret_cast<VkImage *>( pImage ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type
                                          Device::createImage( const ImageCreateInfo &             createInfo,
                         Optional<const AllocationCallbacks> allocator,
                         Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Image image;
    Result                      result = static_cast<Result>(
      d.vkCreateImage( m_device,
                       reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkImage *>( &image ) ) );
    return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type
    Device::createImageUnique( const ImageCreateInfo &             createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Image image;
    Result                      result = static_cast<Result>(
      d.vkCreateImage( m_device,
                       reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>(
                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkImage *>( &image ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Image, Dispatch>(
      result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
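
  // Illustrative usage of the enhanced-mode overloads above (assumes the default `vk` namespace,
  // the default dispatcher, a valid vk::Device `device`, and exceptions enabled):
  //
  //   vk::ImageCreateInfo imageInfo;   // sType is pre-set; remaining members start at their defaults
  //   imageInfo.imageType   = vk::ImageType::e2D;
  //   imageInfo.format      = vk::Format::eR8G8B8A8Unorm;
  //   imageInfo.extent      = vk::Extent3D( 256, 256, 1 );
  //   imageInfo.mipLevels   = 1;
  //   imageInfo.arrayLayers = 1;
  //   imageInfo.usage       = vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst;
  //   vk::UniqueImage image = device.createImageUnique( imageInfo );   // throws a vk::SystemError-derived
  //                                                                    // exception on failure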

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image                       image,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyImage(
      m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image         image,
                                               Optional<const AllocationCallbacks> allocator,
                                               Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyImage( m_device,
                      static_cast<VkImage>( image ),
                      reinterpret_cast<const VkAllocationCallbacks *>(
                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image                       image,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyImage(
      m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image         image,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyImage( m_device,
                      static_cast<VkImage>( image ),
                      reinterpret_cast<const VkAllocationCallbacks *>(
                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image                    image,
                                                            const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
                                                            VULKAN_HPP_NAMESPACE::SubresourceLayout *      pLayout,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageSubresourceLayout( m_device,
                                   static_cast<VkImage>( image ),
                                   reinterpret_cast<const VkImageSubresource *>( pSubresource ),
                                   reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout
                                         Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image,
                                       const ImageSubresource &    subresource,
                                       Dispatch const &            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
    d.vkGetImageSubresourceLayout( m_device,
                                   static_cast<VkImage>( image ),
                                   reinterpret_cast<const VkImageSubresource *>( &subresource ),
                                   reinterpret_cast<VkSubresourceLayout *>( &layout ) );
    return layout;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                             VULKAN_HPP_NAMESPACE::ImageView *                 pView,
                             Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateImageView( m_device,
                                                     reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkImageView *>( pView ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
    Device::createImageView( const ImageViewCreateInfo &         createInfo,
                             Optional<const AllocationCallbacks> allocator,
                             Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageView view;
    Result                          result = static_cast<Result>(
      d.vkCreateImageView( m_device,
                           reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkImageView *>( &view ) ) );
    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
    Device::createImageViewUnique( const ImageViewCreateInfo &         createInfo,
                                   Optional<const AllocationCallbacks> allocator,
                                   Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageView view;
    Result                          result = static_cast<Result>(
      d.vkCreateImageView( m_device,
                           reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkImageView *>( &view ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>(
      result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyImageView(
      m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView     imageView,
                                                   Optional<const AllocationCallbacks> allocator,
                                                   Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyImageView( m_device,
                          static_cast<VkImageView>( imageView ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyImageView(
      m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView     imageView,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyImageView( m_device,
                          static_cast<VkImageView>( imageView ),
                          reinterpret_cast<const VkAllocationCallbacks *>(
                            static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                VULKAN_HPP_NAMESPACE::ShaderModule *                 pShaderModule,
                                Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateShaderModule( m_device,
                              reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                              reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
    Device::createShaderModule( const ShaderModuleCreateInfo &      createInfo,
                                Optional<const AllocationCallbacks> allocator,
                                Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
    Result                             result = static_cast<Result>(
      d.vkCreateShaderModule( m_device,
                              reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
    return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
    Device::createShaderModuleUnique( const ShaderModuleCreateInfo &      createInfo,
                                      Optional<const AllocationCallbacks> allocator,
                                      Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
    Result                             result = static_cast<Result>(
      d.vkCreateShaderModule( m_device,
                              reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>(
      result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
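
  // Illustrative usage (assumes the default `vk` namespace, a valid vk::Device `device`, exceptions
  // enabled, and a hypothetical helper loadSpirv() that returns the SPIR-V words):
  //
  //   std::vector<uint32_t> spirv = loadSpirv( "shader.vert.spv" );
  //   vk::ShaderModuleCreateInfo smci;
  //   smci.codeSize = spirv.size() * sizeof( uint32_t );   // codeSize is in bytes, not words
  //   smci.pCode    = spirv.data();
  //   vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique( smci );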

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule  shaderModule,
                                                      Optional<const AllocationCallbacks> allocator,
                                                      Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule  shaderModule,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                 VULKAN_HPP_NAMESPACE::PipelineCache *                 pPipelineCache,
                                 Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreatePipelineCache( m_device,
                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                               reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
    Device::createPipelineCache( const PipelineCacheCreateInfo &     createInfo,
                                 Optional<const AllocationCallbacks> allocator,
                                 Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
    Result                              result = static_cast<Result>(
      d.vkCreatePipelineCache( m_device,
                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
    return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
    Device::createPipelineCacheUnique( const PipelineCacheCreateInfo &     createInfo,
                                       Optional<const AllocationCallbacks> allocator,
                                       Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
    Result                              result = static_cast<Result>(
      d.vkCreatePipelineCache( m_device,
                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>(
      result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache               pipelineCache,
                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipelineCache( m_device,
                              static_cast<VkPipelineCache>( pipelineCache ),
                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                                       Optional<const AllocationCallbacks> allocator,
                                                       Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipelineCache( m_device,
                              static_cast<VkPipelineCache>( pipelineCache ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache               pipelineCache,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipelineCache( m_device,
                              static_cast<VkPipelineCache>( pipelineCache ),
                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipelineCache( m_device,
                              static_cast<VkPipelineCache>( pipelineCache ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                  size_t *                            pDataSize,
                                  void *                              pData,
                                  Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
                       Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> data;
    size_t                                 dataSize;
    Result                                 result;
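    // Two-call pattern: query the required size with a null data pointer, then fetch the data;
    // retry while the implementation reports eIncomplete, since the cache may grow between calls.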
    do
    {
      result = static_cast<Result>(
        d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
      if ( ( result == Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
                                                                static_cast<VkPipelineCache>( pipelineCache ),
                                                                &dataSize,
                                                                reinterpret_cast<void *>( data.data() ) ) );
        VULKAN_HPP_ASSERT( dataSize <= data.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
    {
      data.resize( dataSize );
    }
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
  }

  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
                       Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                  Uint8_tAllocator &                  uint8_tAllocator,
                                  Dispatch const &                    d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
    size_t                                 dataSize;
    Result                                 result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
      if ( ( result == Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result = static_cast<Result>( d.vkGetPipelineCacheData( m_device,
                                                                static_cast<VkPipelineCache>( pipelineCache ),
                                                                &dataSize,
                                                                reinterpret_cast<void *>( data.data() ) ) );
        VULKAN_HPP_ASSERT( dataSize <= data.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
    {
      data.resize( dataSize );
    }
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
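
  // Illustrative usage (assumes the default `vk` namespace, exceptions enabled, and a hypothetical
  // helper writeFile()); the returned blob can be fed back through
  // vk::PipelineCacheCreateInfo::initialDataSize / pInitialData on a later run:
  //
  //   std::vector<uint8_t> blob = device.getPipelineCacheData( pipelineCache );
  //   writeFile( "pipeline_cache.bin", blob );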

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache         dstCache,
                                 uint32_t                                    srcCacheCount,
                                 const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
                                 Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkMergePipelineCaches( m_device,
                                                         static_cast<VkPipelineCache>( dstCache ),
                                                         srcCacheCount,
                                                         reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache                           dstCache,
                                 ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
                                 Dispatch const &                                              d ) const
  {
    Result result =
      static_cast<Result>( d.vkMergePipelineCaches( m_device,
                                                    static_cast<VkPipelineCache>( dstCache ),
                                                    srcCaches.size(),
                                                    reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
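
  // Illustrative usage (assumes the default `vk` namespace, exceptions enabled, and valid
  // vk::PipelineCache handles); ArrayProxy lets the source caches be passed as an initializer list:
  //
  //   device.mergePipelineCaches( dstCache, { srcCacheA, srcCacheB } );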

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache                      pipelineCache,
                                     uint32_t                                                 createInfoCount,
                                     const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks *        pAllocator,
                                     VULKAN_HPP_NAMESPACE::Pipeline *                         pPipelines,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateGraphicsPipelines( m_device,
                                   static_cast<VkPipelineCache>( pipelineCache ),
                                   createInfoCount,
                                   reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                   reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
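  // Unlike most create functions, the enhanced-mode pipeline creation overloads return a
  // ResultValue<...> rather than ResultValueType<...>::type, because ePipelineCompileRequiredEXT is
  // accepted as a success code alongside eSuccess; callers should check the result member as well
  // as using the value member.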
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
                                         Device::createGraphicsPipelines(
      VULKAN_HPP_NAMESPACE::PipelineCache                                        pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks>                                        allocator,
      Dispatch const &                                                           d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    Result                                   result = static_cast<Result>(
      d.vkCreateGraphicsPipelines( m_device,
                                   static_cast<VkPipelineCache>( pipelineCache ),
                                   createInfos.size(),
                                   reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue(
      result,
      pipelines,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename PipelineAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
                                         Device::createGraphicsPipelines(
      VULKAN_HPP_NAMESPACE::PipelineCache                                        pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks>                                        allocator,
      PipelineAllocator &                                                        pipelineAllocator,
      Dispatch const &                                                           d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    Result                                   result = static_cast<Result>(
      d.vkCreateGraphicsPipelines( m_device,
                                   static_cast<VkPipelineCache>( pipelineCache ),
                                   createInfos.size(),
                                   reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue(
      result,
      pipelines,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
                                         Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache                      pipelineCache,
                                    const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
                                    Optional<const AllocationCallbacks>                      allocator,
                                    Dispatch const &                                         d ) const
  {
    Pipeline pipeline;
    Result   result = static_cast<Result>(
      d.vkCreateGraphicsPipelines( m_device,
                                   static_cast<VkPipelineCache>( pipelineCache ),
                                   1,
                                   reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    return createResultValue(
      result,
      pipeline,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                                         Device::createGraphicsPipelinesUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                                        pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks>                                        allocator,
      Dispatch const &                                                           d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    std::vector<Pipeline>                                            pipelines( createInfos.size() );
    Result                                                           result = static_cast<Result>(
      d.vkCreateGraphicsPipelines( m_device,
                                   static_cast<VkPipelineCache>( pipelineCache ),
                                   createInfos.size(),
                                   reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
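    // Both eSuccess and ePipelineCompileRequiredEXT are treated as success here, so the raw handles
    // are wrapped into UniqueHandles sharing a single ObjectDestroy deleter.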
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue(
      result,
      std::move( uniquePipelines ),
      VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <
    typename Dispatch,
    typename PipelineAllocator,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                                         Device::createGraphicsPipelinesUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                                        pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks>                                        allocator,
      PipelineAllocator &                                                        pipelineAllocator,
      Dispatch const &                                                           d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    std::vector<Pipeline>                                            pipelines( createInfos.size() );
    Result                                                           result = static_cast<Result>(
      d.vkCreateGraphicsPipelines( m_device,
                                   static_cast<VkPipelineCache>( pipelineCache ),
                                   createInfos.size(),
                                   reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue(
      result,
      std::move( uniquePipelines ),
      VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
                                         Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache                      pipelineCache,
                                          const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
                                          Optional<const AllocationCallbacks>                      allocator,
                                          Dispatch const &                                         d ) const
  {
    Pipeline pipeline;
    Result   result = static_cast<Result>(
      d.vkCreateGraphicsPipelines( m_device,
                                   static_cast<VkPipelineCache>( pipelineCache ),
                                   1,
                                   reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<Pipeline, Dispatch>(
      result,
      pipeline,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
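
  // Illustrative usage (assumes the default `vk` namespace, exceptions enabled, and a filled-in
  // vk::GraphicsPipelineCreateInfo `pipelineCreateInfo`):
  //
  //   auto rv = device.createGraphicsPipelineUnique( pipelineCache, pipelineCreateInfo );
  //   if ( rv.result == vk::Result::ePipelineCompileRequiredEXT )
  //   {
  //     // handle the non-throwing ePipelineCompileRequiredEXT code as appropriate
  //   }
  //   vk::UniquePipeline pipeline = std::move( rv.value );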

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache                     pipelineCache,
                                    uint32_t                                                createInfoCount,
                                    const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                    VULKAN_HPP_NAMESPACE::Pipeline *                        pPipelines,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateComputePipelines( m_device,
                                  static_cast<VkPipelineCache>( pipelineCache ),
                                  createInfoCount,
                                  reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
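  // The compute pipeline overloads below mirror the graphics pipeline ones above: the same
  // ResultValue return type, the same ePipelineCompileRequiredEXT success code, and matching
  // ...Unique variants.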
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
                                         Device::createComputePipelines(
      VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks>                                       allocator,
      Dispatch const &                                                          d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    Result                                   result = static_cast<Result>(
      d.vkCreateComputePipelines( m_device,
                                  static_cast<VkPipelineCache>( pipelineCache ),
                                  createInfos.size(),
                                  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue(
      result,
      pipelines,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename PipelineAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
                                         Device::createComputePipelines(
      VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks>                                       allocator,
      PipelineAllocator &                                                       pipelineAllocator,
      Dispatch const &                                                          d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    Result                                   result = static_cast<Result>(
      d.vkCreateComputePipelines( m_device,
                                  static_cast<VkPipelineCache>( pipelineCache ),
                                  createInfos.size(),
                                  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue(
      result,
      pipelines,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
                                         Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache                     pipelineCache,
                                   const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                   Optional<const AllocationCallbacks>                     allocator,
                                   Dispatch const &                                        d ) const
  {
    Pipeline pipeline;
    Result   result = static_cast<Result>(
      d.vkCreateComputePipelines( m_device,
                                  static_cast<VkPipelineCache>( pipelineCache ),
                                  1,
                                  reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    return createResultValue(
      result,
      pipeline,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                                         Device::createComputePipelinesUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks>                                       allocator,
      Dispatch const &                                                          d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    std::vector<Pipeline>                                            pipelines( createInfos.size() );
    Result                                                           result = static_cast<Result>(
      d.vkCreateComputePipelines( m_device,
                                  static_cast<VkPipelineCache>( pipelineCache ),
                                  createInfos.size(),
                                  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue(
      result,
      std::move( uniquePipelines ),
      VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <
    typename Dispatch,
    typename PipelineAllocator,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                                         Device::createComputePipelinesUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
      Optional<const AllocationCallbacks>                                       allocator,
      PipelineAllocator &                                                       pipelineAllocator,
      Dispatch const &                                                          d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    std::vector<Pipeline>                                            pipelines( createInfos.size() );
    Result                                                           result = static_cast<Result>(
      d.vkCreateComputePipelines( m_device,
                                  static_cast<VkPipelineCache>( pipelineCache ),
                                  createInfos.size(),
                                  reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue(
      result,
      std::move( uniquePipelines ),
      VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
                                         Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache                     pipelineCache,
                                         const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                         Optional<const AllocationCallbacks>                     allocator,
                                         Dispatch const &                                        d ) const
  {
    Pipeline pipeline;
    Result   result = static_cast<Result>(
      d.vkCreateComputePipelines( m_device,
                                  static_cast<VkPipelineCache>( pipelineCache ),
                                  1,
                                  reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<Pipeline, Dispatch>(
      result,
      pipeline,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline                    pipeline,
                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipeline(
      m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline      pipeline,
                                                  Optional<const AllocationCallbacks> allocator,
                                                  Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipeline( m_device,
                         static_cast<VkPipeline>( pipeline ),
                         reinterpret_cast<const VkAllocationCallbacks *>(
                           static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline                    pipeline,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipeline(
      m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline      pipeline,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipeline( m_device,
                         static_cast<VkPipeline>( pipeline ),
                         reinterpret_cast<const VkAllocationCallbacks *>(
                           static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
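
  // Usage sketch (illustrative only): the overloads above are equivalent ways to destroy a pipeline;
  // destroy() is the generic name and destroyPipeline() the explicit one. The sketch assumes the
  // default namespace alias (vk), a hypothetical vk::Device device and vk::Pipeline pipeline, and
  // leaves the allocator / dispatcher arguments at the defaults from their declarations.
  //
  //   device.destroyPipeline( pipeline );   // named overload
  //   device.destroy( pipeline );           // generic overload, same effect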

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                  VULKAN_HPP_NAMESPACE::PipelineLayout *                 pPipelineLayout,
                                  Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreatePipelineLayout( m_device,
                                reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
    Device::createPipelineLayout( const PipelineLayoutCreateInfo &    createInfo,
                                  Optional<const AllocationCallbacks> allocator,
                                  Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
    Result                               result = static_cast<Result>(
      d.vkCreatePipelineLayout( m_device,
                                reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
    return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
    Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo &    createInfo,
                                        Optional<const AllocationCallbacks> allocator,
                                        Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
    Result                               result = static_cast<Result>(
      d.vkCreatePipelineLayout( m_device,
                                reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>(
      result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
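
  // Usage sketch (same assumptions as the sketch above; layoutCreateInfo is a caller-filled
  // vk::PipelineLayoutCreateInfo): with exceptions enabled the enhanced overload returns the handle
  // directly and throws on failure, while the *Unique variant wraps it in a RAII handle whose
  // ObjectDestroy deleter destroys the layout when it goes out of scope.
  //
  //   vk::PipelineLayout layout = device.createPipelineLayout( layoutCreateInfo );
  //   auto uniqueLayout         = device.createPipelineLayoutUnique( layoutCreateInfo );
  //   ...
  //   device.destroyPipelineLayout( layout );   // non-unique handles must be destroyed explicitly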

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipelineLayout( m_device,
                               static_cast<VkPipelineLayout>( pipelineLayout ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
                                                        Optional<const AllocationCallbacks>  allocator,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipelineLayout( m_device,
                               static_cast<VkPipelineLayout>( pipelineLayout ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout              pipelineLayout,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipelineLayout( m_device,
                               static_cast<VkPipelineLayout>( pipelineLayout ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
                                          Optional<const AllocationCallbacks>  allocator,
                                          Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPipelineLayout( m_device,
                               static_cast<VkPipelineLayout>( pipelineLayout ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo *   pCreateInfo,
                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                           VULKAN_HPP_NAMESPACE::Sampler *                   pSampler,
                           Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateSampler( m_device,
                                                   reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                   reinterpret_cast<VkSampler *>( pSampler ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type
    Device::createSampler( const SamplerCreateInfo &           createInfo,
                           Optional<const AllocationCallbacks> allocator,
                           Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Sampler sampler;
    Result                        result = static_cast<Result>(
      d.vkCreateSampler( m_device,
                         reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
                         reinterpret_cast<const VkAllocationCallbacks *>(
                           static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                         reinterpret_cast<VkSampler *>( &sampler ) ) );
    return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type
    Device::createSamplerUnique( const SamplerCreateInfo &           createInfo,
                                 Optional<const AllocationCallbacks> allocator,
                                 Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Sampler sampler;
    Result                        result = static_cast<Result>(
      d.vkCreateSampler( m_device,
                         reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
                         reinterpret_cast<const VkAllocationCallbacks *>(
                           static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                         reinterpret_cast<VkSampler *>( &sampler ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>(
      result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
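
  // Usage sketch (same assumptions): sampler creation follows the same pattern.
  //
  //   vk::SamplerCreateInfo samplerCreateInfo{};                             // fields omitted here
  //   vk::Sampler sampler = device.createSampler( samplerCreateInfo );
  //   auto uniqueSampler  = device.createSamplerUnique( samplerCreateInfo );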

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler                     sampler,
                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySampler(
      m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler       sampler,
                                                 Optional<const AllocationCallbacks> allocator,
                                                 Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySampler( m_device,
                        static_cast<VkSampler>( sampler ),
                        reinterpret_cast<const VkAllocationCallbacks *>(
                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler                     sampler,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySampler(
      m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler       sampler,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySampler( m_device,
                        static_cast<VkSampler>( sampler ),
                        reinterpret_cast<const VkAllocationCallbacks *>(
                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks *           pAllocator,
                                       VULKAN_HPP_NAMESPACE::DescriptorSetLayout *                 pSetLayout,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDescriptorSetLayout( m_device,
                                     reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                     reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
    Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo,
                                       Optional<const AllocationCallbacks>   allocator,
                                       Dispatch const &                      d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
    Result                                    result = static_cast<Result>(
      d.vkCreateDescriptorSetLayout( m_device,
                                     reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
    return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
    Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo,
                                             Optional<const AllocationCallbacks>   allocator,
                                             Dispatch const &                      d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
    Result                                    result = static_cast<Result>(
      d.vkCreateDescriptorSetLayout( m_device,
                                     reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>(
      result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
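
  // Usage sketch (same assumptions; bindingsInfo is a caller-filled
  // vk::DescriptorSetLayoutCreateInfo):
  //
  //   vk::DescriptorSetLayout setLayout = device.createDescriptorSetLayout( bindingsInfo );
  //   auto uniqueSetLayout              = device.createDescriptorSetLayoutUnique( bindingsInfo );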

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout         descriptorSetLayout,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorSetLayout( m_device,
                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
                                        Optional<const AllocationCallbacks>       allocator,
                                        Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorSetLayout( m_device,
                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout         descriptorSetLayout,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorSetLayout( m_device,
                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
                                          Optional<const AllocationCallbacks>       allocator,
                                          Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorSetLayout( m_device,
                                    static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                  VULKAN_HPP_NAMESPACE::DescriptorPool *                 pDescriptorPool,
                                  Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDescriptorPool( m_device,
                                reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
    Device::createDescriptorPool( const DescriptorPoolCreateInfo &    createInfo,
                                  Optional<const AllocationCallbacks> allocator,
                                  Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
    Result                               result = static_cast<Result>(
      d.vkCreateDescriptorPool( m_device,
                                reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
    return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
    Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo &    createInfo,
                                        Optional<const AllocationCallbacks> allocator,
                                        Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
    Result                               result = static_cast<Result>(
      d.vkCreateDescriptorPool( m_device,
                                reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>(
      result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
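
  // Usage sketch (same assumptions; poolCreateInfo is a caller-filled
  // vk::DescriptorPoolCreateInfo):
  //
  //   vk::DescriptorPool descriptorPool = device.createDescriptorPool( poolCreateInfo );
  //   auto uniqueDescriptorPool         = device.createDescriptorPoolUnique( poolCreateInfo );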

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorPool( m_device,
                               static_cast<VkDescriptorPool>( descriptorPool ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
                                                        Optional<const AllocationCallbacks>  allocator,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorPool( m_device,
                               static_cast<VkDescriptorPool>( descriptorPool ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool              descriptorPool,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorPool( m_device,
                               static_cast<VkDescriptorPool>( descriptorPool ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
                                          Optional<const AllocationCallbacks>  allocator,
                                          Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorPool( m_device,
                               static_cast<VkDescriptorPool>( descriptorPool ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool           descriptorPool,
                                                        VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkResetDescriptorPool(
      m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
                                 VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
                                 Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkResetDescriptorPool(
      m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
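
  // Usage sketch (same assumptions): in enhanced mode the wrapper returns
  // ResultValueType<void>::type, so with exceptions enabled it is void and reports an error code by
  // throwing rather than by return value.
  //
  //   device.resetDescriptorPool( descriptorPool, vk::DescriptorPoolResetFlags() );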

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
                                    VULKAN_HPP_NAMESPACE::DescriptorSet *                   pDescriptorSets,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkAllocateDescriptorSets( m_device,
                                  reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ),
                                  reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DescriptorSetAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
    Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
  {
    std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
    Result                                             result = static_cast<Result>(
      d.vkAllocateDescriptorSets( m_device,
                                  reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
                                  reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
  }

  template <typename DescriptorSetAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type
    Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo,
                                    DescriptorSetAllocator &          descriptorSetAllocator,
                                    Dispatch const &                  d ) const
  {
    std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount,
                                                                       descriptorSetAllocator );
    Result                                             result = static_cast<Result>(
      d.vkAllocateDescriptorSets( m_device,
                                  reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
                                  reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch, typename DescriptorSetAllocator>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
    Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
  {
    std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
    std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
    Result                     result = static_cast<Result>(
      d.vkAllocateDescriptorSets( m_device,
                                  reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
                                  reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
      PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
      for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ )
      {
        uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
      }
    }
    return createResultValue(
      result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
  }

  template <typename Dispatch,
            typename DescriptorSetAllocator,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
    Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo,
                                          DescriptorSetAllocator &          descriptorSetAllocator,
                                          Dispatch const &                  d ) const
  {
    std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets(
      descriptorSetAllocator );
    std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
    Result                     result = static_cast<Result>(
      d.vkAllocateDescriptorSets( m_device,
                                  reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ),
                                  reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
      PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
      for ( size_t i = 0; i < allocateInfo.descriptorSetCount; i++ )
      {
        uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
      }
    }
    return createResultValue(
      result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
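
  // Usage sketch (same assumptions; allocInfo is a caller-filled vk::DescriptorSetAllocateInfo):
  // the enhanced overload sizes its result vector from allocInfo.descriptorSetCount, and the
  // *Unique variant attaches a PoolFree deleter so each handle is returned to
  // allocInfo.descriptorPool instead of being destroyed individually.
  //
  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
  //   auto uniqueSets                     = device.allocateDescriptorSetsUnique( allocInfo );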

  template <typename Dispatch>
  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool        descriptorPool,
                                                       uint32_t                                    descriptorSetCount,
                                                       const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkFreeDescriptorSets( m_device,
                              static_cast<VkDescriptorPool>( descriptorPool ),
                              descriptorSetCount,
                              reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool                          descriptorPool,
                                ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
                                Dispatch const &                                              d ) const
  {
    Result result = static_cast<Result>(
      d.vkFreeDescriptorSets( m_device,
                              static_cast<VkDescriptorPool>( descriptorPool ),
                              descriptorSets.size(),
                              reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool        descriptorPool,
                                         uint32_t                                    descriptorSetCount,
                                         const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                                         Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkFreeDescriptorSets( m_device,
                              static_cast<VkDescriptorPool>( descriptorPool ),
                              descriptorSetCount,
                              reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool                          descriptorPool,
                  ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
                  Dispatch const &                                              d ) const
  {
    Result result = static_cast<Result>(
      d.vkFreeDescriptorSets( m_device,
                              static_cast<VkDescriptorPool>( descriptorPool ),
                              descriptorSets.size(),
                              reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
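
  // Usage sketch (same assumptions): the enhanced overloads take an ArrayProxy, so a single handle,
  // an initializer list, or a std::vector may be passed; free() and freeDescriptorSets() are
  // interchangeable.
  //
  //   device.freeDescriptorSets( descriptorPool, sets );      // std::vector<vk::DescriptorSet>
  //   device.free( descriptorPool, { sets[0], sets[1] } );    // initializer list via ArrayProxy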

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::updateDescriptorSets( uint32_t                                         descriptorWriteCount,
                                  const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                                  uint32_t                                         descriptorCopyCount,
                                  const VULKAN_HPP_NAMESPACE::CopyDescriptorSet *  pDescriptorCopies,
                                  Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUpdateDescriptorSets( m_device,
                              descriptorWriteCount,
                              reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
                              descriptorCopyCount,
                              reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const &  descriptorCopies,
                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUpdateDescriptorSets( m_device,
                              descriptorWrites.size(),
                              reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
                              descriptorCopies.size(),
                              reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
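
  // Usage sketch (same assumptions; write, writes and copies are caller-filled
  // vk::WriteDescriptorSet / vk::CopyDescriptorSet values): both parameters are ArrayProxies, and
  // either one may be empty.
  //
  //   device.updateDescriptorSets( write, nullptr );    // a single write, no copies
  //   device.updateDescriptorSets( writes, copies );    // vectors of writes and copies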

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                               VULKAN_HPP_NAMESPACE::Framebuffer *                 pFramebuffer,
                               Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateFramebuffer( m_device,
                                                       reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
    Device::createFramebuffer( const FramebufferCreateInfo &       createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
    Result                            result = static_cast<Result>(
      d.vkCreateFramebuffer( m_device,
                             reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
    return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
    Device::createFramebufferUnique( const FramebufferCreateInfo &       createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
    Result                            result = static_cast<Result>(
      d.vkCreateFramebuffer( m_device,
                             reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>(
      result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
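
  // Usage sketch (same assumptions; framebufferCreateInfo is a caller-filled
  // vk::FramebufferCreateInfo):
  //
  //   vk::Framebuffer framebuffer = device.createFramebuffer( framebufferCreateInfo );
  //   auto uniqueFramebuffer      = device.createFramebufferUnique( framebufferCreateInfo );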

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer                 framebuffer,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFramebuffer( m_device,
                            static_cast<VkFramebuffer>( framebuffer ),
                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer   framebuffer,
                                                     Optional<const AllocationCallbacks> allocator,
                                                     Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFramebuffer( m_device,
                            static_cast<VkFramebuffer>( framebuffer ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer                 framebuffer,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFramebuffer( m_device,
                            static_cast<VkFramebuffer>( framebuffer ),
                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer   framebuffer,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyFramebuffer( m_device,
                            static_cast<VkFramebuffer>( framebuffer ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks *  pAllocator,
                              VULKAN_HPP_NAMESPACE::RenderPass *                 pRenderPass,
                              Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateRenderPass( m_device,
                                                      reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
    Device::createRenderPass( const RenderPassCreateInfo &        createInfo,
                              Optional<const AllocationCallbacks> allocator,
                              Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    Result                           result = static_cast<Result>(
      d.vkCreateRenderPass( m_device,
                            reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
    Device::createRenderPassUnique( const RenderPassCreateInfo &        createInfo,
                                    Optional<const AllocationCallbacks> allocator,
                                    Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    Result                           result = static_cast<Result>(
      d.vkCreateRenderPass( m_device,
                            reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
      result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
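
  // Usage sketch (same assumptions; renderPassCreateInfo is a caller-filled
  // vk::RenderPassCreateInfo):
  //
  //   vk::RenderPass renderPass = device.createRenderPass( renderPassCreateInfo );
  //   auto uniqueRenderPass     = device.createRenderPassUnique( renderPassCreateInfo );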

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass                  renderPass,
                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyRenderPass( m_device,
                           static_cast<VkRenderPass>( renderPass ),
                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass    renderPass,
                                                    Optional<const AllocationCallbacks> allocator,
                                                    Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyRenderPass( m_device,
                           static_cast<VkRenderPass>( renderPass ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass                  renderPass,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyRenderPass( m_device,
                           static_cast<VkRenderPass>( renderPass ),
                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass    renderPass,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyRenderPass( m_device,
                           static_cast<VkRenderPass>( renderPass ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                                           VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetRenderAreaGranularity(
      m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
                                         Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::Extent2D granularity;
    d.vkGetRenderAreaGranularity(
      m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
    return granularity;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
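
  // Usage sketch (same assumptions): the enhanced overload returns the Extent2D by value instead of
  // filling an output pointer.
  //
  //   vk::Extent2D granularity = device.getRenderAreaGranularity( renderPass );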

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                               VULKAN_HPP_NAMESPACE::CommandPool *                 pCommandPool,
                               Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateCommandPool( m_device,
                                                       reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
    Device::createCommandPool( const CommandPoolCreateInfo &       createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
    Result                            result = static_cast<Result>(
      d.vkCreateCommandPool( m_device,
                             reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
    return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
    Device::createCommandPoolUnique( const CommandPoolCreateInfo &       createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
    Result                            result = static_cast<Result>(
      d.vkCreateCommandPool( m_device,
                             reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>(
      result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
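
  // Usage sketch (same assumptions; commandPoolCreateInfo is a caller-filled
  // vk::CommandPoolCreateInfo):
  //
  //   vk::CommandPool commandPool = device.createCommandPool( commandPoolCreateInfo );
  //   auto uniqueCommandPool      = device.createCommandPoolUnique( commandPoolCreateInfo );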

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool                 commandPool,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCommandPool( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool   commandPool,
                                                     Optional<const AllocationCallbacks> allocator,
                                                     Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCommandPool( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool                 commandPool,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCommandPool( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool   commandPool,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCommandPool( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                              VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
                              Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkResetCommandPool(
      m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
                              VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
                              Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkResetCommandPool(
      m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
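
  // Usage sketch (same assumptions): as with resetDescriptorPool above, the enhanced overload
  // returns ResultValueType<void>::type and throws on an error code.
  //
  //   device.resetCommandPool( commandPool, vk::CommandPoolResetFlags() );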

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
                                    VULKAN_HPP_NAMESPACE::CommandBuffer *                   pCommandBuffers,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkAllocateCommandBuffers( m_device,
                                  reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ),
                                  reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename CommandBufferAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
    Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  {
    std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
    Result                                             result = static_cast<Result>(
      d.vkAllocateCommandBuffers( m_device,
                                  reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
                                  reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
  }

  template <typename CommandBufferAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type
    Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo,
                                    CommandBufferAllocator &          commandBufferAllocator,
                                    Dispatch const &                  d ) const
  {
    std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount,
                                                                       commandBufferAllocator );
    Result                                             result = static_cast<Result>(
      d.vkAllocateCommandBuffers( m_device,
                                  reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
                                  reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch, typename CommandBufferAllocator>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
    Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  {
    std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
    std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
    Result                     result = static_cast<Result>(
      d.vkAllocateCommandBuffers( m_device,
                                  reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
                                  reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
      PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
      for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ )
      {
        uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
      }
    }
    return createResultValue(
      result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
  }

  template <typename Dispatch,
            typename CommandBufferAllocator,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
    Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo,
                                          CommandBufferAllocator &          commandBufferAllocator,
                                          Dispatch const &                  d ) const
  {
    std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers(
      commandBufferAllocator );
    std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
    Result                     result = static_cast<Result>(
      d.vkAllocateCommandBuffers( m_device,
                                  reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ),
                                  reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
      PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
      for ( size_t i = 0; i < allocateInfo.commandBufferCount; i++ )
      {
        uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
      }
    }
    return createResultValue(
      result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
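
  // Usage sketch (illustrative comment only, assuming the default `vk` namespace and
  // default struct constructors): the enhanced-mode overloads above size the returned
  // vector from CommandBufferAllocateInfo::commandBufferCount, and the *Unique variant
  // frees each buffer back to its pool on destruction. `device` and `commandPool` are
  // assumed valid handles.
  //
  //   vk::CommandBufferAllocateInfo allocateInfo( commandPool, vk::CommandBufferLevel::ePrimary, 2 );
  //   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocateInfo );
  //   auto uniqueCommandBuffers = device.allocateCommandBuffersUnique( allocateInfo );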

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                                                     uint32_t                                    commandBufferCount,
                                                     const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkFreeCommandBuffers( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            commandBufferCount,
                            reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool                             commandPool,
                                ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkFreeCommandBuffers( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            commandBuffers.size(),
                            reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool           commandPool,
                                       uint32_t                                    commandBufferCount,
                                       const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                       Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkFreeCommandBuffers( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            commandBufferCount,
                            reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool                             commandPool,
                                       ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkFreeCommandBuffers( m_device,
                            static_cast<VkCommandPool>( commandPool ),
                            commandBuffers.size(),
                            reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin(
    const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::end( Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset(
    VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
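
  // Usage sketch (illustrative comment only, assuming the default `vk` namespace and
  // exception-based error handling): a typical recording cycle brackets the vkCmd*
  // wrappers below with begin()/end(); reset() additionally requires a pool created
  // with eResetCommandBuffer.
  //
  //   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
  //   commandBuffer.begin( beginInfo );
  //   // ... record commands ...
  //   commandBuffer.end();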

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                      VULKAN_HPP_NAMESPACE::Pipeline          pipeline,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindPipeline(
      m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                               firstViewport,
                                                     uint32_t                               viewportCount,
                                                     const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewport(
      m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
                                                     ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewport(
      m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                             firstScissor,
                                                    uint32_t                             scissorCount,
                                                    const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
                                                    Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                                               firstScissor,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissor(
      m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
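
  // Usage sketch (illustrative comment only): with dynamic viewport/scissor state
  // enabled on the bound pipeline, the ArrayProxy overloads above accept single
  // values directly; `width` and `height` are assumed uint32_t extents.
  //
  //   commandBuffer.setViewport(
  //     0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( width ), static_cast<float>( height ), 0.0f, 1.0f ) );
  //   commandBuffer.setScissor( 0, vk::Rect2D( { 0, 0 }, { width, height } ) );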

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float            depthBiasConstantFactor,
                                                      float            depthBiasClamp,
                                                      float            depthBiasSlopeFactor,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float      blendConstants[4],
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float            minDepthBounds,
                                                        float            maxDepthBounds,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                               uint32_t                               compareMask,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                             uint32_t                               writeMask,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                             uint32_t                               reference,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                            VULKAN_HPP_NAMESPACE::PipelineLayout    layout,
                                                            uint32_t                                firstSet,
                                                            uint32_t                                descriptorSetCount,
                                                            const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                                                            uint32_t         dynamicOffsetCount,
                                                            const uint32_t * pDynamicOffsets,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindDescriptorSets( m_commandBuffer,
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSetCount,
                               reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
                               dynamicOffsetCount,
                               pDynamicOffsets );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint                       pipelineBindPoint,
                                       VULKAN_HPP_NAMESPACE::PipelineLayout                          layout,
                                       uint32_t                                                      firstSet,
                                       ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
                                       ArrayProxy<const uint32_t> const &                            dynamicOffsets,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindDescriptorSets( m_commandBuffer,
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSets.size(),
                               reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
                               dynamicOffsets.size(),
                               dynamicOffsets.data() );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
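
  // Usage sketch (illustrative comment only): the ArrayProxy overload above forwards
  // descriptorSets.size() and dynamicOffsets.size() straight to vkCmdBindDescriptorSets,
  // so single handles and nullptr (empty) proxies can be passed without explicit counts.
  // `pipelineLayout` and `descriptorSet` are assumed valid handles.
  //
  //   commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics,
  //                                     pipelineLayout,
  //                                     0,              // firstSet
  //                                     descriptorSet,  // single handle -> ArrayProxy of one
  //                                     nullptr );      // no dynamic offsets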

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                         VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                         VULKAN_HPP_NAMESPACE::IndexType  indexType,
                                                         Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindIndexBuffer( m_commandBuffer,
                            static_cast<VkBuffer>( buffer ),
                            static_cast<VkDeviceSize>( offset ),
                            static_cast<VkIndexType>( indexType ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t                                 firstBinding,
                                                           uint32_t                                 bindingCount,
                                                           const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                                                           const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindVertexBuffers( m_commandBuffer,
                              firstBinding,
                              bindingCount,
                              reinterpret_cast<const VkBuffer *>( pBuffers ),
                              reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindVertexBuffers( uint32_t                                                   firstBinding,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
#  else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindVertexBuffers( m_commandBuffer,
                              firstBinding,
                              buffers.size(),
                              reinterpret_cast<const VkBuffer *>( buffers.data() ),
                              reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
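
  // Usage sketch (illustrative comment only): the ArrayProxy overload above requires
  // exactly one offset per buffer; a mismatch throws LogicError (or only asserts when
  // VULKAN_HPP_NO_EXCEPTIONS is defined). `vertexBuffer` is an assumed vk::Buffer.
  //
  //   commandBuffer.bindVertexBuffers( 0, { vertexBuffer }, { 0 } );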

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t         vertexCount,
                                              uint32_t         instanceCount,
                                              uint32_t         firstVertex,
                                              uint32_t         firstInstance,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t         indexCount,
                                                     uint32_t         instanceCount,
                                                     uint32_t         firstIndex,
                                                     int32_t          vertexOffset,
                                                     uint32_t         firstInstance,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                      uint32_t                         drawCount,
                                                      uint32_t                         stride,
                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirect(
      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                             VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                             uint32_t                         drawCount,
                                                             uint32_t                         stride,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexedIndirect(
      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t         groupCountX,
                                                  uint32_t         groupCountY,
                                                  uint32_t         groupCountZ,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                          VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                          Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer             srcBuffer,
                                                    VULKAN_HPP_NAMESPACE::Buffer             dstBuffer,
                                                    uint32_t                                 regionCount,
                                                    const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( srcBuffer ),
                       static_cast<VkBuffer>( dstBuffer ),
                       regionCount,
                       reinterpret_cast<const VkBufferCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
                                                    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( srcBuffer ),
                       static_cast<VkBuffer>( dstBuffer ),
                       regions.size(),
                       reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
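
  // Usage sketch (illustrative comment only): the ArrayProxy overload above accepts a
  // single region in place of a count/pointer pair. `stagingBuffer`, `deviceBuffer`
  // and `byteSize` are assumed to exist.
  //
  //   commandBuffer.copyBuffer( stagingBuffer, deviceBuffer, vk::BufferCopy( 0, 0, byteSize ) );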

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image             srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image             dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       dstImageLayout,
                                                   uint32_t                                regionCount,
                                                   const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
                                                   Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regionCount,
                      reinterpret_cast<const VkImageCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image       srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image       dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regions.size(),
                      reinterpret_cast<const VkImageCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image             srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image             dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       dstImageLayout,
                                                   uint32_t                                regionCount,
                                                   const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
                                                   VULKAN_HPP_NAMESPACE::Filter            filter,
                                                   Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBlitImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regionCount,
                      reinterpret_cast<const VkImageBlit *>( pRegions ),
                      static_cast<VkFilter>( filter ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image       srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image       dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
                                                   VULKAN_HPP_NAMESPACE::Filter                              filter,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBlitImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regions.size(),
                      reinterpret_cast<const VkImageBlit *>( regions.data() ),
                      static_cast<VkFilter>( filter ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer                  srcBuffer,
                                                           VULKAN_HPP_NAMESPACE::Image                   dstImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout             dstImageLayout,
                                                           uint32_t                                      regionCount,
                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBufferToImage( m_commandBuffer,
                              static_cast<VkBuffer>( srcBuffer ),
                              static_cast<VkImage>( dstImage ),
                              static_cast<VkImageLayout>( dstImageLayout ),
                              regionCount,
                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer                                    srcBuffer,
                                      VULKAN_HPP_NAMESPACE::Image                                     dstImage,
                                      VULKAN_HPP_NAMESPACE::ImageLayout                               dstImageLayout,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBufferToImage( m_commandBuffer,
                              static_cast<VkBuffer>( srcBuffer ),
                              static_cast<VkImage>( dstImage ),
                              static_cast<VkImageLayout>( dstImageLayout ),
                              regions.size(),
                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image                   srcImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout             srcImageLayout,
                                                           VULKAN_HPP_NAMESPACE::Buffer                  dstBuffer,
                                                           uint32_t                                      regionCount,
                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImageToBuffer( m_commandBuffer,
                              static_cast<VkImage>( srcImage ),
                              static_cast<VkImageLayout>( srcImageLayout ),
                              static_cast<VkBuffer>( dstBuffer ),
                              regionCount,
                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image                                     srcImage,
                                      VULKAN_HPP_NAMESPACE::ImageLayout                               srcImageLayout,
                                      VULKAN_HPP_NAMESPACE::Buffer                                    dstBuffer,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImageToBuffer( m_commandBuffer,
                              static_cast<VkImage>( srcImage ),
                              static_cast<VkImageLayout>( srcImageLayout ),
                              static_cast<VkBuffer>( dstBuffer ),
                              regions.size(),
                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dataSize,
                                                      const void *                     pData,
                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdUpdateBuffer( m_commandBuffer,
                         static_cast<VkBuffer>( dstBuffer ),
                         static_cast<VkDeviceSize>( dstOffset ),
                         static_cast<VkDeviceSize>( dataSize ),
                         pData );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                      ArrayProxy<const T> const &      data,
                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdUpdateBuffer( m_commandBuffer,
                         static_cast<VkBuffer>( dstBuffer ),
                         static_cast<VkDeviceSize>( dstOffset ),
                         data.size() * sizeof( T ),
                         reinterpret_cast<const void *>( data.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
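
  // Usage sketch (illustrative comment only): the templated overload above computes the
  // byte count as data.size() * sizeof( T ); vkCmdUpdateBuffer restricts such inline
  // updates to at most 65536 bytes. `uniformBuffer` is an assumed vk::Buffer.
  //
  //   std::array<float, 4> color = { 1.0f, 0.5f, 0.25f, 1.0f };
  //   commandBuffer.updateBuffer<float>( uniformBuffer, 0, color );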

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                                                    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                    VULKAN_HPP_NAMESPACE::DeviceSize size,
                                                    uint32_t                         data,
                                                    Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdFillBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( dstBuffer ),
                       static_cast<VkDeviceSize>( dstOffset ),
                       static_cast<VkDeviceSize>( size ),
                       data );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image                   image,
                                                         VULKAN_HPP_NAMESPACE::ImageLayout             imageLayout,
                                                         const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,
                                                         uint32_t                                      rangeCount,
                                                         const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearColorImage( m_commandBuffer,
                            static_cast<VkImage>( image ),
                            static_cast<VkImageLayout>( imageLayout ),
                            reinterpret_cast<const VkClearColorValue *>( pColor ),
                            rangeCount,
                            reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image                                           image,
                                    VULKAN_HPP_NAMESPACE::ImageLayout                                     imageLayout,
                                    const ClearColorValue &                                               color,
                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearColorImage( m_commandBuffer,
                            static_cast<VkImage>( image ),
                            static_cast<VkImageLayout>( imageLayout ),
                            reinterpret_cast<const VkClearColorValue *>( &color ),
                            ranges.size(),
                            reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image                          image,
                                           VULKAN_HPP_NAMESPACE::ImageLayout                    imageLayout,
                                           const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
                                           uint32_t                                             rangeCount,
                                           const VULKAN_HPP_NAMESPACE::ImageSubresourceRange *  pRanges,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearDepthStencilImage( m_commandBuffer,
                                   static_cast<VkImage>( image ),
                                   static_cast<VkImageLayout>( imageLayout ),
                                   reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
                                   rangeCount,
                                   reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image       image,
                                           VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                                           const ClearDepthStencilValue &    depthStencil,
                                           ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearDepthStencilImage( m_commandBuffer,
                                   static_cast<VkImage>( image ),
                                   static_cast<VkImageLayout>( imageLayout ),
                                   reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
                                   ranges.size(),
                                   reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t                                      attachmentCount,
                                                          const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
                                                          uint32_t                                      rectCount,
                                                          const VULKAN_HPP_NAMESPACE::ClearRect *       pRects,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearAttachments( m_commandBuffer,
                             attachmentCount,
                             reinterpret_cast<const VkClearAttachment *>( pAttachments ),
                             rectCount,
                             reinterpret_cast<const VkClearRect *>( pRects ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
                                     ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const &       rects,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdClearAttachments( m_commandBuffer,
                             attachments.size(),
                             reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
                             rects.size(),
                             reinterpret_cast<const VkClearRect *>( rects.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image                srcImage,
                                                      VULKAN_HPP_NAMESPACE::ImageLayout          srcImageLayout,
                                                      VULKAN_HPP_NAMESPACE::Image                dstImage,
                                                      VULKAN_HPP_NAMESPACE::ImageLayout          dstImageLayout,
                                                      uint32_t                                   regionCount,
                                                      const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage( m_commandBuffer,
                         static_cast<VkImage>( srcImage ),
                         static_cast<VkImageLayout>( srcImageLayout ),
                         static_cast<VkImage>( dstImage ),
                         static_cast<VkImageLayout>( dstImageLayout ),
                         regionCount,
                         reinterpret_cast<const VkImageResolve *>( pRegions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image                                  srcImage,
                                 VULKAN_HPP_NAMESPACE::ImageLayout                            srcImageLayout,
                                 VULKAN_HPP_NAMESPACE::Image                                  dstImage,
                                 VULKAN_HPP_NAMESPACE::ImageLayout                            dstImageLayout,
                                 ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage( m_commandBuffer,
                         static_cast<VkImage>( srcImage ),
                         static_cast<VkImageLayout>( srcImageLayout ),
                         static_cast<VkImage>( dstImage ),
                         static_cast<VkImageLayout>( dstImageLayout ),
                         regions.size(),
                         reinterpret_cast<const VkImageResolve *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event              event,
                                                  VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
                                                  Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event              event,
                                                    VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::waitEvents( uint32_t                                          eventCount,
                               const VULKAN_HPP_NAMESPACE::Event *               pEvents,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags          srcStageMask,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags          dstStageMask,
                               uint32_t                                          memoryBarrierCount,
                               const VULKAN_HPP_NAMESPACE::MemoryBarrier *       pMemoryBarriers,
                               uint32_t                                          bufferMemoryBarrierCount,
                               const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                               uint32_t                                          imageMemoryBarrierCount,
                               const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *  pImageMemoryBarriers,
                               Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWaitEvents( m_commandBuffer,
                       eventCount,
                       reinterpret_cast<const VkEvent *>( pEvents ),
                       static_cast<VkPipelineStageFlags>( srcStageMask ),
                       static_cast<VkPipelineStageFlags>( dstStageMask ),
                       memoryBarrierCount,
                       reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
                       bufferMemoryBarrierCount,
                       reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
                       imageMemoryBarrierCount,
                       reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &               events,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags                            srcStageMask,
                               VULKAN_HPP_NAMESPACE::PipelineStageFlags                            dstStageMask,
                               ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &       memoryBarriers,
                               ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
                               ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &  imageMemoryBarriers,
                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWaitEvents( m_commandBuffer,
                       events.size(),
                       reinterpret_cast<const VkEvent *>( events.data() ),
                       static_cast<VkPipelineStageFlags>( srcStageMask ),
                       static_cast<VkPipelineStageFlags>( dstStageMask ),
                       memoryBarriers.size(),
                       reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
                       bufferMemoryBarriers.size(),
                       reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
                       imageMemoryBarriers.size(),
                       reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags          srcStageMask,
                                    VULKAN_HPP_NAMESPACE::PipelineStageFlags          dstStageMask,
                                    VULKAN_HPP_NAMESPACE::DependencyFlags             dependencyFlags,
                                    uint32_t                                          memoryBarrierCount,
                                    const VULKAN_HPP_NAMESPACE::MemoryBarrier *       pMemoryBarriers,
                                    uint32_t                                          bufferMemoryBarrierCount,
                                    const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                                    uint32_t                                          imageMemoryBarrierCount,
                                    const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier *  pImageMemoryBarriers,
                                    Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPipelineBarrier( m_commandBuffer,
                            static_cast<VkPipelineStageFlags>( srcStageMask ),
                            static_cast<VkPipelineStageFlags>( dstStageMask ),
                            static_cast<VkDependencyFlags>( dependencyFlags ),
                            memoryBarrierCount,
                            reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
                            bufferMemoryBarrierCount,
                            reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
                            imageMemoryBarrierCount,
                            reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier(
    VULKAN_HPP_NAMESPACE::PipelineStageFlags                            srcStageMask,
    VULKAN_HPP_NAMESPACE::PipelineStageFlags                            dstStageMask,
    VULKAN_HPP_NAMESPACE::DependencyFlags                               dependencyFlags,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const &       memoryBarriers,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const &  imageMemoryBarriers,
    Dispatch const &                                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPipelineBarrier( m_commandBuffer,
                            static_cast<VkPipelineStageFlags>( srcStageMask ),
                            static_cast<VkPipelineStageFlags>( dstStageMask ),
                            static_cast<VkDependencyFlags>( dependencyFlags ),
                            memoryBarriers.size(),
                            reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
                            bufferMemoryBarriers.size(),
                            reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
                            imageMemoryBarriers.size(),
                            reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
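
  // Usage sketch (illustrative comment only, assuming the default `vk` namespace): the
  // ArrayProxy overload above makes a single-barrier image layout transition compact.
  // `image` is an assumed vk::Image handle.
  //
  //   vk::ImageMemoryBarrier barrier( {},                                  // srcAccessMask
  //                                   vk::AccessFlagBits::eTransferWrite,  // dstAccessMask
  //                                   vk::ImageLayout::eUndefined,
  //                                   vk::ImageLayout::eTransferDstOptimal,
  //                                   VK_QUEUE_FAMILY_IGNORED,
  //                                   VK_QUEUE_FAMILY_IGNORED,
  //                                   image,
  //                                   { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
  //                                  vk::PipelineStageFlagBits::eTransfer,
  //                                  {}, nullptr, nullptr, barrier );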

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool         queryPool,
                                                    uint32_t                                query,
                                                    VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginQuery(
      m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                  uint32_t                        query,
                                                  Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                        uint32_t                        firstQuery,
                                                        uint32_t                        queryCount,
                                                        Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                                                        VULKAN_HPP_NAMESPACE::QueryPool             queryPool,
                                                        uint32_t                                    query,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteTimestamp( m_commandBuffer,
                           static_cast<VkPipelineStageFlagBits>( pipelineStage ),
                           static_cast<VkQueryPool>( queryPool ),
                           query );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                                              uint32_t                               firstQuery,
                                                              uint32_t                               queryCount,
                                                              VULKAN_HPP_NAMESPACE::Buffer           dstBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize       dstOffset,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                                              VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyQueryPoolResults( m_commandBuffer,
                                 static_cast<VkQueryPool>( queryPool ),
                                 firstQuery,
                                 queryCount,
                                 static_cast<VkBuffer>( dstBuffer ),
                                 static_cast<VkDeviceSize>( dstOffset ),
                                 static_cast<VkDeviceSize>( stride ),
                                 static_cast<VkQueryResultFlags>( flags ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout   layout,
                                                       VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
                                                       uint32_t                               offset,
                                                       uint32_t                               size,
                                                       const void *                           pValues,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushConstants( m_commandBuffer,
                          static_cast<VkPipelineLayout>( layout ),
                          static_cast<VkShaderStageFlags>( stageFlags ),
                          offset,
                          size,
                          pValues );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout   layout,
                                                       VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
                                                       uint32_t                               offset,
                                                       ArrayProxy<const T> const &            values,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushConstants( m_commandBuffer,
                          static_cast<VkPipelineLayout>( layout ),
                          static_cast<VkShaderStageFlags>( stageFlags ),
                          offset,
                          values.size() * sizeof( T ),
                          reinterpret_cast<const void *>( values.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
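
  // Illustrative sketch (assumption, not generated code): the templated overload above derives the byte size from
  // the element type and count, so a contiguous container can be passed directly. Placeholders: `commandBuffer`,
  // `pipelineLayout`:
  //   std::array<float, 4> color = { 1.0f, 0.0f, 0.0f, 1.0f };
  //   commandBuffer.pushConstants<float>( pipelineLayout, vk::ShaderStageFlagBits::eFragment, 0, color );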

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                    VULKAN_HPP_NAMESPACE::SubpassContents             contents,
                                    Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass( m_commandBuffer,
                            reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
                            static_cast<VkSubpassContents>( contents ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo &           renderPassBegin,
                                                         VULKAN_HPP_NAMESPACE::SubpassContents contents,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass( m_commandBuffer,
                            reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
                            static_cast<VkSubpassContents>( contents ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents,
                                                     Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndRenderPass( m_commandBuffer );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t                                    commandBufferCount,
                                                         const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteCommands(
      m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteCommands(
      m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
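
  // Illustrative sketch (placeholders `primary` and `secondaryCommandBuffers` are assumptions): the ArrayProxy
  // overload above accepts a std::vector of secondary command buffers directly:
  //   primary.executeCommands( secondaryCommandBuffers );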

  //=== VK_VERSION_1_1 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t *       pApiVersion,
                                                                          Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
  {
    uint32_t apiVersion;
    Result   result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
    return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
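
  // Illustrative sketch (assumes exceptions and the default dispatcher are enabled): the enhanced overload above
  // returns the packed version directly, which can be decoded with the VK_VERSION_* macros:
  //   uint32_t apiVersion = vk::enumerateInstanceVersion();
  //   uint32_t major      = VK_VERSION_MAJOR( apiVersion );
  //   uint32_t minor      = VK_VERSION_MINOR( apiVersion );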

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::bindBufferMemory2( uint32_t                                           bindInfoCount,
                               const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                               Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindBufferMemory2(
      m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
                               Dispatch const &                                                     d ) const
  {
    Result result = static_cast<Result>( d.vkBindBufferMemory2(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::bindImageMemory2( uint32_t                                          bindInfoCount,
                              const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                              Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
                              Dispatch const &                                                    d ) const
  {
    Result result = static_cast<Result>( d.vkBindImageMemory2(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
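
  // Illustrative sketch (placeholders `device`, `buffer`, and `memory` are assumptions): the ArrayProxy overloads
  // above bind one or more resources in a single call and throw on failure when exceptions are enabled:
  //   device.bindBufferMemory2( vk::BindBufferMemoryInfo( buffer, memory, 0 ) );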

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getGroupPeerMemoryFeatures( uint32_t                                       heapIndex,
                                        uint32_t                                       localDeviceIndex,
                                        uint32_t                                       remoteDeviceIndex,
                                        VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                        Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
                                          heapIndex,
                                          localDeviceIndex,
                                          remoteDeviceIndex,
                                          reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
                                         Device::getGroupPeerMemoryFeatures( uint32_t         heapIndex,
                                        uint32_t         localDeviceIndex,
                                        uint32_t         remoteDeviceIndex,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
    d.vkGetDeviceGroupPeerMemoryFeatures( m_device,
                                          heapIndex,
                                          localDeviceIndex,
                                          remoteDeviceIndex,
                                          reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
    return peerMemoryFeatures;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
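
  // Illustrative sketch (placeholder `device`; the heap and device indices are arbitrary): the enhanced overload
  // above returns the feature flags by value instead of writing through a pointer:
  //   vk::PeerMemoryFeatureFlags features = device.getGroupPeerMemoryFeatures( 0, 0, 1 );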

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t         deviceMask,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t         baseGroupX,
                                                      uint32_t         baseGroupY,
                                                      uint32_t         baseGroupZ,
                                                      uint32_t         groupCountX,
                                                      uint32_t         groupCountY,
                                                      uint32_t         groupCountZ,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups(
    uint32_t *                                            pPhysicalDeviceGroupCount,
    VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
    Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
      m_instance,
      pPhysicalDeviceGroupCount,
      reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
    uint32_t                                                                           physicalDeviceGroupCount;
    Result                                                                             result;
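    // Standard two-call enumeration: query the count, size the vector, then fetch; retry while the
    // implementation reports VK_INCOMPLETE because the count changed between the two calls.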
    do
    {
      result =
        static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
          m_instance,
          &physicalDeviceGroupCount,
          reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
        VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValue(
      result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
  }

  template <
    typename PhysicalDeviceGroupPropertiesAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroups(
      PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
      physicalDeviceGroupPropertiesAllocator );
    uint32_t physicalDeviceGroupCount;
    Result   result;
    do
    {
      result =
        static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
          m_instance,
          &physicalDeviceGroupCount,
          reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
        VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValue(
      result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
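
  // Illustrative sketch (placeholder `instance`; assumes exceptions enabled): the enhanced overload above hides
  // the count/fetch loop and returns the filled vector:
  //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();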

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                         VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                pMemoryRequirements,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageMemoryRequirements2( m_device,
                                     reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
                                     reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
                                         Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info,
                                         Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetImageMemoryRequirements2( m_device,
                                     reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
                                     reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info,
                                         Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                  structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetImageMemoryRequirements2( m_device,
                                     reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
                                     reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
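
  // Illustrative sketch (placeholders `device` and `image`): the StructureChain overload above fills chained
  // structures in one call, e.g. to query dedicated-allocation requirements alongside the base requirements:
  //   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
  //     vk::ImageMemoryRequirementsInfo2( image ) );
  //   bool wantsDedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;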

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
                                          VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                          Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetBufferMemoryRequirements2( m_device,
                                      reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
                                      reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
                                         Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info,
                                          Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetBufferMemoryRequirements2( m_device,
                                      reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
                                      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info,
                                          Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                  structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetBufferMemoryRequirements2( m_device,
                                      reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
                                      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2(
    const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
    uint32_t *                                                       pSparseMemoryRequirementCount,
    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *           pSparseMemoryRequirements,
    Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageSparseMemoryRequirements2(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
      pSparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
                      Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info,
                                               Dispatch const &                           d ) const
  {
    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t                                                                             sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements2( m_device,
                                           reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                           &sparseMemoryRequirementCount,
                                           nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements2(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
      &sparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    return sparseMemoryRequirements;
  }

  template <
    typename SparseImageMemoryRequirements2Allocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
                      Device::getImageSparseMemoryRequirements2(
      const ImageSparseMemoryRequirementsInfo2 & info,
      SparseImageMemoryRequirements2Allocator &  sparseImageMemoryRequirements2Allocator,
      Dispatch const &                           d ) const
  {
    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements2( m_device,
                                           reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                           &sparseMemoryRequirementCount,
                                           nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements2(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
      &sparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    return sparseMemoryRequirements;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
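
  // Illustrative sketch (placeholders `device` and `sparseImage`): the enhanced overload above returns the
  // complete vector of per-aspect requirements:
  //   auto sparseReqs =
  //     device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( sparseImage ) );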

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
                                         PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return features;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                      structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
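
  // Illustrative sketch (placeholder `physicalDevice`; the chained feature struct is just an example): the
  // StructureChain overload above queries extension or core feature structs together with the base features:
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
  //   bool hasTimelineSemaphore = chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;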

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
                                      reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
                                         PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
                                      reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return properties;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                        structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice,
                                      reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format              format,
                                          VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
                                          Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFormatProperties2(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
                                         PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
                                          Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
    d.vkGetPhysicalDeviceFormatProperties2(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return formatProperties;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
                                          Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                structureChain;
    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
    d.vkGetPhysicalDeviceFormatProperties2(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *               pImageFormatProperties,
    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
      reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
    PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                               Dispatch const &                       d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue(
      result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
                                          PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                               Dispatch const &                       d ) const
  {
    StructureChain<X, Y, Z...>                     structureChain;
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue(
      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
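
  // Illustrative sketch (placeholder `physicalDevice`; assumes exceptions enabled, so an unsupported combination
  // throws instead of returning an error code):
  //   auto props = physicalDevice.getImageFormatProperties2(
  //     vk::PhysicalDeviceImageFormatInfo2( vk::Format::eR8G8B8A8Unorm,
  //                                         vk::ImageType::e2D,
  //                                         vk::ImageTiling::eOptimal,
  //                                         vk::ImageUsageFlagBits::eSampled ) );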

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getQueueFamilyProperties2( uint32_t *                                     pQueueFamilyPropertyCount,
                                               VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      pQueueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
                                         PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
    uint32_t                                                             queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    return queueFamilyProperties;
  }

  template <typename QueueFamilyProperties2Allocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
                                         PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
                                               Dispatch const &                  d ) const
  {
    std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties(
      queueFamilyProperties2Allocator );
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    return queueFamilyProperties;
  }

  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
                                         PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    std::vector<StructureChain, StructureChainAllocator>      returnVector( queueFamilyPropertyCount );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
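    // Point each temporary QueueFamilyProperties2 at the caller-visible chain so the driver writes the chained
    // structures of the corresponding StructureChain element directly; the base struct is copied back below.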
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext =
        returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return returnVector;
  }

  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
                                         PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator,
                                               Dispatch const &          d ) const
  {
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    std::vector<StructureChain, StructureChainAllocator>      returnVector( queueFamilyPropertyCount,
                                                                       structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext =
        returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return returnVector;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
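
  // Illustrative sketch (placeholder `physicalDevice`): the simplest enhanced overload above returns the base
  // properties as a vector; the StructureChain overloads additionally fill per-family chained structures:
  //   std::vector<vk::QueueFamilyProperties2> families = physicalDevice.getQueueFamilyProperties2();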

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMemoryProperties2(
      m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
                                         PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties2(
      m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return memoryProperties;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                              structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
    d.vkGetPhysicalDeviceMemoryProperties2(
      m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
    uint32_t *                                                         pPropertyCount,
    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *               pProperties,
    Dispatch const &                                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
      pPropertyCount,
      reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
                      PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                     Dispatch const &                             d ) const
  {
    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
    uint32_t                                                                         propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    return properties;
  }

  template <
    typename SparseImageFormatProperties2Allocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
                      PhysicalDevice::getSparseImageFormatProperties2(
      const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
      SparseImageFormatProperties2Allocator &      sparseImageFormatProperties2Allocator,
      Dispatch const &                             d ) const
  {
    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties(
      sparseImageFormatProperties2Allocator );
    uint32_t propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    return properties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
                                                  VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkTrimCommandPool(
      m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
                                            VULKAN_HPP_NAMESPACE::Queue *                  pQueue,
                                            Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDeviceQueue2(
      m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
                                         Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::Queue queue;
    d.vkGetDeviceQueue2(
      m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
    return queue;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
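
  // Illustrative sketch (placeholders `device` and `queueFamilyIndex`): the enhanced overload above returns the
  // queue handle by value:
  //   vk::Queue queue = device.getQueue2( vk::DeviceQueueInfo2( {}, queueFamilyIndex, 0 ) );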

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
                                          VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
                                          Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateSamplerYcbcrConversion( m_device,
                                        reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                        reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
    Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo,
                                          Optional<const AllocationCallbacks>      allocator,
                                          Dispatch const &                         d ) const
  {
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    Result                                       result = static_cast<Result>(
      d.vkCreateSamplerYcbcrConversion( m_device,
                                        reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    return createResultValue(
      result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
    Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
                                                Optional<const AllocationCallbacks>      allocator,
                                                Dispatch const &                         d ) const
  {
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    Result                                       result = static_cast<Result>(
      d.vkCreateSamplerYcbcrConversion( m_device,
                                        reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
      result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
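
  // Illustrative sketch (placeholders `device` and `createInfo`; assumes exceptions and smart handles enabled):
  // the Unique variant above wraps the conversion in a RAII handle that destroys it automatically:
  //   vk::UniqueSamplerYcbcrConversion conversion = device.createSamplerYcbcrConversionUnique( createInfo );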

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversion( m_device,
                                       static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                           Optional<const AllocationCallbacks>          allocator,
                                           Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversion(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversion( m_device,
                                       static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                          Optional<const AllocationCallbacks>          allocator,
                                          Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversion(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate(
    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *                 pDescriptorUpdateTemplate,
    Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
    Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                            Optional<const AllocationCallbacks>        allocator,
                                            Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    Result                                         result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    return createResultValue(
      result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
    Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                                  Optional<const AllocationCallbacks>        allocator,
                                                  Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    Result                                         result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    ObjectDestroy<Device, Dispatch>                deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
      result,
      descriptorUpdateTemplate,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique",
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplate( m_device,
                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                             Optional<const AllocationCallbacks>            allocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplate(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplate( m_device,
                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                          Optional<const AllocationCallbacks>            allocator,
                                          Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplate(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
                                             VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                             const void *                                   pData,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUpdateDescriptorSetWithTemplate( m_device,
                                         static_cast<VkDescriptorSet>( descriptorSet ),
                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                         pData );
  }
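
  // Illustrative usage sketch (not part of the generated API): create a descriptor update template and use it to
  // write a single uniform-buffer descriptor. The handles `device`, `descriptorSetLayout`, `descriptorSet` and
  // `uniformBuffer` are assumptions, as is enhanced mode with exceptions enabled (so the create call returns the
  // handle directly); `vk` is assumed to be the default VULKAN_HPP_NAMESPACE.
  //
  //   vk::DescriptorUpdateTemplateEntry entry;
  //   entry.dstBinding      = 0;
  //   entry.dstArrayElement = 0;
  //   entry.descriptorCount = 1;
  //   entry.descriptorType  = vk::DescriptorType::eUniformBuffer;
  //   entry.offset          = 0;
  //   entry.stride          = sizeof( vk::DescriptorBufferInfo );
  //
  //   vk::DescriptorUpdateTemplateCreateInfo templateCreateInfo;
  //   templateCreateInfo.descriptorUpdateEntryCount = 1;
  //   templateCreateInfo.pDescriptorUpdateEntries   = &entry;
  //   templateCreateInfo.templateType               = vk::DescriptorUpdateTemplateType::eDescriptorSet;
  //   templateCreateInfo.descriptorSetLayout        = descriptorSetLayout;
  //
  //   vk::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplate( templateCreateInfo );
  //   vk::DescriptorBufferInfo     bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
  //   device.updateDescriptorSetWithTemplate( descriptorSet, updateTemplate, &bufferInfo );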

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
    VULKAN_HPP_NAMESPACE::ExternalBufferProperties *               pExternalBufferProperties,
    Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalBufferProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
      reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
                                         PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
                                                 Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
    d.vkGetPhysicalDeviceExternalBufferProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
      reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
    return externalBufferProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties *               pExternalFenceProperties,
    Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalFenceProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
      reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
                                         PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
                                                Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
    d.vkGetPhysicalDeviceExternalFenceProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
      reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
    return externalFenceProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *               pExternalSemaphoreProperties,
    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalSemaphoreProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
      reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
                                         PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
    d.vkGetPhysicalDeviceExternalSemaphoreProperties(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
      reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
    return externalSemaphoreProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
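
  // Illustrative usage sketch: querying external-handle capabilities before creating exportable resources. The
  // `physicalDevice` handle and the chosen handle types are assumptions about the application.
  //
  //   vk::PhysicalDeviceExternalBufferInfo externalBufferInfo;
  //   externalBufferInfo.usage      = vk::BufferUsageFlagBits::eTransferSrc;
  //   externalBufferInfo.handleType = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  //   vk::ExternalBufferProperties bufferProperties = physicalDevice.getExternalBufferProperties( externalBufferInfo );
  //
  //   vk::PhysicalDeviceExternalSemaphoreInfo externalSemaphoreInfo;
  //   externalSemaphoreInfo.handleType = vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  //   vk::ExternalSemaphoreProperties semaphoreProperties =
  //     physicalDevice.getExternalSemaphoreProperties( externalSemaphoreInfo );
  //
  //   bool exportable = static_cast<bool>( bufferProperties.externalMemoryProperties.externalMemoryFeatures &
  //                                        vk::ExternalMemoryFeatureFlagBits::eExportable );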

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                           VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *          pSupport,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDescriptorSetLayoutSupport( m_device,
                                       reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
                                       reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
                                         Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo,
                                           Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
    d.vkGetDescriptorSetLayoutSupport( m_device,
                                       reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                       reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return support;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo,
                                           Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                         structureChain;
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
      structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
    d.vkGetDescriptorSetLayoutSupport( m_device,
                                       reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                       reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
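
  // Illustrative usage sketch: checking whether a descriptor set layout is supported, optionally pulling the
  // variable-descriptor-count limit out of a structure chain. The `bindings` array and chaining
  // vk::DescriptorSetVariableDescriptorCountLayoutSupport are assumptions about the application.
  //
  //   vk::DescriptorSetLayoutCreateInfo layoutCreateInfo;
  //   layoutCreateInfo.bindingCount = static_cast<uint32_t>( bindings.size() );
  //   layoutCreateInfo.pBindings    = bindings.data();
  //
  //   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutCreateInfo );
  //   if ( support.supported )
  //   {
  //     auto chain = device.getDescriptorSetLayoutSupport<vk::DescriptorSetLayoutSupport,
  //                                                       vk::DescriptorSetVariableDescriptorCountLayoutSupport>(
  //       layoutCreateInfo );
  //     uint32_t maxVariableCount =
  //       chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;
  //   }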

  //=== VK_VERSION_1_2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                           VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                           VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                           VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                           uint32_t                         maxDrawCount,
                                                           uint32_t                         stride,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirectCount( m_commandBuffer,
                              static_cast<VkBuffer>( buffer ),
                              static_cast<VkDeviceSize>( offset ),
                              static_cast<VkBuffer>( countBuffer ),
                              static_cast<VkDeviceSize>( countBufferOffset ),
                              maxDrawCount,
                              stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                  VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                  uint32_t                         maxDrawCount,
                                                                  uint32_t                         stride,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
                                     static_cast<VkBuffer>( buffer ),
                                     static_cast<VkDeviceSize>( offset ),
                                     static_cast<VkBuffer>( countBuffer ),
                                     static_cast<VkDeviceSize>( countBufferOffset ),
                                     maxDrawCount,
                                     stride );
  }
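
  // Illustrative usage sketch: a GPU-driven draw where the draw records live in `indirectBuffer` and the actual draw
  // count is written (e.g. by a compute pass) into the first four bytes of `countBuffer`; both buffers and the bound
  // pipeline/vertex/index state are assumptions.
  //
  //   commandBuffer.drawIndexedIndirectCount( indirectBuffer,
  //                                           0,                                          // offset of the first record
  //                                           countBuffer,
  //                                           0,                                          // offset of the uint32_t count
  //                                           maxDrawCount,                               // upper bound on the count
  //                                           sizeof( VkDrawIndexedIndirectCommand ) );   // stride between records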

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                               VULKAN_HPP_NAMESPACE::RenderPass *                  pRenderPass,
                               Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateRenderPass2( m_device,
                                                       reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
    Device::createRenderPass2( const RenderPassCreateInfo2 &       createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    Result                           result = static_cast<Result>(
      d.vkCreateRenderPass2( m_device,
                             reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
    Device::createRenderPass2Unique( const RenderPassCreateInfo2 &       createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    Result                           result = static_cast<Result>(
      d.vkCreateRenderPass2( m_device,
                             reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
      result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                     const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *    pSubpassBeginInfo,
                                     Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass2( m_commandBuffer,
                             reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
                             reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin,
                                                          const SubpassBeginInfo &    subpassBeginInfo,
                                                          Dispatch const &            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass2( m_commandBuffer,
                             reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
                             reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
                                                      const VULKAN_HPP_NAMESPACE::SubpassEndInfo *   pSubpassEndInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass2( m_commandBuffer,
                         reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
                         reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo,
                                                      const SubpassEndInfo &   subpassEndInfo,
                                                      Dispatch const &         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass2( m_commandBuffer,
                         reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
                         reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo,
                                                        Dispatch const &       d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
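
  // Illustrative usage sketch: recording a two-subpass render pass with the *2 entry points. `renderPass`,
  // `framebuffer`, `extent` and `clearValue` are assumptions about the application's setup.
  //
  //   vk::RenderPassBeginInfo renderPassBegin;
  //   renderPassBegin.renderPass      = renderPass;
  //   renderPassBegin.framebuffer     = framebuffer;
  //   renderPassBegin.renderArea      = vk::Rect2D( vk::Offset2D( 0, 0 ), extent );
  //   renderPassBegin.clearValueCount = 1;
  //   renderPassBegin.pClearValues    = &clearValue;
  //
  //   commandBuffer.beginRenderPass2( renderPassBegin, vk::SubpassBeginInfo( vk::SubpassContents::eInline ) );
  //   // ... record subpass 0 ...
  //   commandBuffer.nextSubpass2( vk::SubpassBeginInfo( vk::SubpassContents::eInline ), vk::SubpassEndInfo() );
  //   // ... record subpass 1 ...
  //   commandBuffer.endRenderPass2( vk::SubpassEndInfo() );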

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                 uint32_t                        firstQuery,
                                                 uint32_t                        queryCount,
                                                 Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue(
    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
                                          Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
  {
    uint64_t value;
    Result   result =
      static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
                            uint64_t                                        timeout,
                            Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo,
                                                                        uint64_t                  timeout,
                                                                        Dispatch const &          d ) const
  {
    Result result = static_cast<Result>(
      d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore(
    const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
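
  // Illustrative usage sketch: host-side signalling and waiting on a timeline semaphore. `timelineSemaphore` is
  // assumed to have been created with a vk::SemaphoreTypeCreateInfo( vk::SemaphoreType::eTimeline, 0 ) chained into
  // its create info; note that waitSemaphores returns a vk::Result because eTimeout is a success code.
  //
  //   vk::SemaphoreSignalInfo signalInfo;
  //   signalInfo.semaphore = timelineSemaphore;
  //   signalInfo.value     = 1;
  //   device.signalSemaphore( signalInfo );
  //
  //   uint64_t              waitValue = 1;
  //   vk::SemaphoreWaitInfo waitInfo;
  //   waitInfo.semaphoreCount = 1;
  //   waitInfo.pSemaphores    = &timelineSemaphore;
  //   waitInfo.pValues        = &waitValue;
  //   vk::Result waitResult   = device.waitSemaphores( waitInfo, UINT64_MAX );
  //
  //   uint64_t counter = device.getSemaphoreCounterValue( timelineSemaphore );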

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceAddress>(
      d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress(
    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t
                    Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceMemoryOpaqueCaptureAddress(
      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceMemoryOpaqueCaptureAddress(
      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
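
  // Illustrative usage sketch: retrieving a buffer's device address for use in a shader, plus the opaque capture
  // addresses used by capture/replay tooling. `buffer` and `memory` are assumptions; the buffer is assumed to have
  // been created with vk::BufferUsageFlagBits::eShaderDeviceAddress and the bufferDeviceAddress feature enabled.
  //
  //   vk::BufferDeviceAddressInfo addressInfo;
  //   addressInfo.buffer = buffer;
  //
  //   vk::DeviceAddress address              = device.getBufferAddress( addressInfo );
  //   uint64_t          bufferCaptureAddress = device.getBufferOpaqueCaptureAddress( addressInfo );
  //
  //   vk::DeviceMemoryOpaqueCaptureAddressInfo memoryInfo;
  //   memoryInfo.memory             = memory;
  //   uint64_t memoryCaptureAddress = device.getMemoryOpaqueCaptureAddress( memoryInfo );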

  //=== VK_KHR_surface ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR                  surface,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySurfaceKHR(
      m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR    surface,
                                                      Optional<const AllocationCallbacks> allocator,
                                                      Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySurfaceKHR( m_instance,
                           static_cast<VkSurfaceKHR>( surface ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR                  surface,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySurfaceKHR(
      m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR    surface,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySurfaceKHR( m_instance,
                           static_cast<VkSurfaceKHR>( surface ),
                           reinterpret_cast<const VkAllocationCallbacks *>(
                             static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getSurfaceSupportKHR( uint32_t                         queueFamilyIndex,
                                          VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                          VULKAN_HPP_NAMESPACE::Bool32 *   pSupported,
                                          Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
                                                                        queueFamilyIndex,
                                                                        static_cast<VkSurfaceKHR>( surface ),
                                                                        reinterpret_cast<VkBool32 *>( pSupported ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
                                          PhysicalDevice::getSurfaceSupportKHR( uint32_t                         queueFamilyIndex,
                                          VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                          Dispatch const &                 d ) const
  {
    VULKAN_HPP_NAMESPACE::Bool32 supported;
    Result                       result =
      static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice,
                                                                   queueFamilyIndex,
                                                                   static_cast<VkSurfaceKHR>( surface ),
                                                                   reinterpret_cast<VkBool32 *>( &supported ) ) );
    return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
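
  // Illustrative usage sketch: picking a queue family that can present to `surface`. The `queueFamilyCount` is
  // assumed to have been queried already; in enhanced mode with exceptions the call returns the vk::Bool32 directly.
  //
  //   uint32_t presentQueueFamilyIndex = UINT32_MAX;
  //   for ( uint32_t i = 0; i < queueFamilyCount; ++i )
  //   {
  //     if ( physicalDevice.getSurfaceSupportKHR( i, surface ) )
  //     {
  //       presentQueueFamilyIndex = i;
  //       break;
  //     }
  //   }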

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR               surface,
                                               VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
      m_physicalDevice,
      static_cast<VkSurfaceKHR>( surface ),
      reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
    PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
      m_physicalDevice,
      static_cast<VkSurfaceKHR>( surface ),
      reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
    return createResultValue(
      result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR         surface,
                                          uint32_t *                               pSurfaceFormatCount,
                                          VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
                                          Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
                                              static_cast<VkSurfaceKHR>( surface ),
                                              pSurfaceFormatCount,
                                              reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SurfaceFormatKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
    PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
    uint32_t                                                 surfaceFormatCount;
    Result                                                   result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
                                                  static_cast<VkSurfaceKHR>( surface ),
                                                  &surfaceFormatCount,
                                                  reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
        VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValue(
      result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
  }

  template <typename SurfaceFormatKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
    PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                          SurfaceFormatKHRAllocator &      surfaceFormatKHRAllocator,
                                          Dispatch const &                 d ) const
  {
    std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
    uint32_t                                                 surfaceFormatCount;
    Result                                                   result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice,
                                                  static_cast<VkSurfaceKHR>( surface ),
                                                  &surfaceFormatCount,
                                                  reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
        VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValue(
      result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR       surface,
                                               uint32_t *                             pPresentModeCount,
                                               VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
                                               Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
                                                   static_cast<VkSurfaceKHR>( surface ),
                                                   pPresentModeCount,
                                                   reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PresentModeKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
    uint32_t                                             presentModeCount;
    Result                                               result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
                                                       static_cast<VkSurfaceKHR>( surface ),
                                                       &presentModeCount,
                                                       reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValue(
      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
  }

  template <typename PresentModeKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                               PresentModeKHRAllocator &        presentModeKHRAllocator,
                                               Dispatch const &                 d ) const
  {
    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
    uint32_t                                             presentModeCount;
    Result                                               result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice,
                                                       static_cast<VkSurfaceKHR>( surface ),
                                                       &presentModeCount,
                                                       reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValue(
      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
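
  // Illustrative usage sketch: gathering the data needed to configure a swapchain. The enhanced overloads hide the
  // two-call enumeration loops above and return std::vector by value; `surface` is an assumption.
  //
  //   vk::SurfaceCapabilitiesKHR        capabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface );
  //   std::vector<vk::SurfaceFormatKHR> formats      = physicalDevice.getSurfaceFormatsKHR( surface );
  //   std::vector<vk::PresentModeKHR>   presentModes = physicalDevice.getSurfacePresentModesKHR( surface );
  //
  //   bool mailboxAvailable =
  //     std::find( presentModes.begin(), presentModes.end(), vk::PresentModeKHR::eMailbox ) != presentModes.end();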

  //=== VK_KHR_swapchain ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                VULKAN_HPP_NAMESPACE::SwapchainKHR *                 pSwapchain,
                                Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateSwapchainKHR( m_device,
                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                              reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
    Device::createSwapchainKHR( const SwapchainCreateInfoKHR &      createInfo,
                                Optional<const AllocationCallbacks> allocator,
                                Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    Result                             result = static_cast<Result>(
      d.vkCreateSwapchainKHR( m_device,
                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
    Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR &      createInfo,
                                      Optional<const AllocationCallbacks> allocator,
                                      Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    Result                             result = static_cast<Result>(
      d.vkCreateSwapchainKHR( m_device,
                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>(
      result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
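
  // Illustrative usage sketch: creating a swapchain from previously queried surface data. Field values such as the
  // image count, format and present mode are application policy and shown only as plausible defaults; the Unique
  // variant is an alternative that destroys the swapchain automatically when the handle goes out of scope.
  //
  //   vk::SwapchainCreateInfoKHR swapchainCreateInfo;
  //   swapchainCreateInfo.surface          = surface;
  //   swapchainCreateInfo.minImageCount    = capabilities.minImageCount;
  //   swapchainCreateInfo.imageFormat      = formats[0].format;
  //   swapchainCreateInfo.imageColorSpace  = formats[0].colorSpace;
  //   swapchainCreateInfo.imageExtent      = capabilities.currentExtent;
  //   swapchainCreateInfo.imageArrayLayers = 1;
  //   swapchainCreateInfo.imageUsage       = vk::ImageUsageFlagBits::eColorAttachment;
  //   swapchainCreateInfo.preTransform     = capabilities.currentTransform;
  //   swapchainCreateInfo.compositeAlpha   = vk::CompositeAlphaFlagBitsKHR::eOpaque;
  //   swapchainCreateInfo.presentMode      = vk::PresentModeKHR::eFifo;
  //   swapchainCreateInfo.clipped          = VK_TRUE;
  //
  //   vk::SwapchainKHR       swapchain       = device.createSwapchainKHR( swapchainCreateInfo );
  //   vk::UniqueSwapchainKHR uniqueSwapchain = device.createSwapchainKHRUnique( swapchainCreateInfo );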

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR                swapchain,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySwapchainKHR( m_device,
                             static_cast<VkSwapchainKHR>( swapchain ),
                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR  swapchain,
                                                      Optional<const AllocationCallbacks> allocator,
                                                      Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySwapchainKHR( m_device,
                             static_cast<VkSwapchainKHR>( swapchain ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR                swapchain,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySwapchainKHR( m_device,
                             static_cast<VkSwapchainKHR>( swapchain ),
                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR  swapchain,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySwapchainKHR( m_device,
                             static_cast<VkSwapchainKHR>( swapchain ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                   uint32_t *                         pSwapchainImageCount,
                                   VULKAN_HPP_NAMESPACE::Image *      pSwapchainImages,
                                   Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
                                                           static_cast<VkSwapchainKHR>( swapchain ),
                                                           pSwapchainImageCount,
                                                           reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename ImageAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
                       Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    std::vector<Image, ImageAllocator> swapchainImages;
    uint32_t                           swapchainImageCount;
    Result                             result;
    do
    {
      result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
        m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && swapchainImageCount )
      {
        swapchainImages.resize( swapchainImageCount );
        result =
          static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
                                                          static_cast<VkSwapchainKHR>( swapchain ),
                                                          &swapchainImageCount,
                                                          reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
        VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
    {
      swapchainImages.resize( swapchainImageCount );
    }
    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
  }

  template <typename ImageAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type
                       Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                   ImageAllocator &                   imageAllocator,
                                   Dispatch const &                   d ) const
  {
    std::vector<Image, ImageAllocator> swapchainImages( imageAllocator );
    uint32_t                           swapchainImageCount;
    Result                             result;
    do
    {
      result = static_cast<Result>( d.vkGetSwapchainImagesKHR(
        m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && swapchainImageCount )
      {
        swapchainImages.resize( swapchainImageCount );
        result =
          static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device,
                                                          static_cast<VkSwapchainKHR>( swapchain ),
                                                          &swapchainImageCount,
                                                          reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
        VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
    {
      swapchainImages.resize( swapchainImageCount );
    }
    return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
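
  // Illustrative usage sketch: the enhanced overload wraps the count/data enumeration into a single call that
  // returns the images by value; the images remain owned by the swapchain and must not be destroyed individually.
  //
  //   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );
  //   uint32_t               imageCount      = static_cast<uint32_t>( swapchainImages.size() );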

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                 uint64_t                           timeout,
                                 VULKAN_HPP_NAMESPACE::Semaphore    semaphore,
                                 VULKAN_HPP_NAMESPACE::Fence        fence,
                                 uint32_t *                         pImageIndex,
                                 Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
                                                         static_cast<VkSwapchainKHR>( swapchain ),
                                                         timeout,
                                                         static_cast<VkSemaphore>( semaphore ),
                                                         static_cast<VkFence>( fence ),
                                                         pImageIndex ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
                                         Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                 uint64_t                           timeout,
                                 VULKAN_HPP_NAMESPACE::Semaphore    semaphore,
                                 VULKAN_HPP_NAMESPACE::Fence        fence,
                                 Dispatch const &                   d ) const
  {
    uint32_t imageIndex;
    Result   result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device,
                                                                  static_cast<VkSwapchainKHR>( swapchain ),
                                                                  timeout,
                                                                  static_cast<VkSemaphore>( semaphore ),
                                                                  static_cast<VkFence>( fence ),
                                                                  &imageIndex ) );
    return createResultValue( result,
                              imageIndex,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eTimeout,
                                VULKAN_HPP_NAMESPACE::Result::eNotReady,
                                VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR(
    const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo,
                                                                   Dispatch const &       d ) const
  {
    Result result =
      static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
    return createResultValue(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
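
  // Illustrative usage sketch of a per-frame acquire/present loop body. acquireNextImageKHR returns a
  // vk::ResultValue<uint32_t> because eTimeout, eNotReady and eSuboptimalKHR are success codes the caller may want
  // to inspect; presentKHR likewise returns vk::Result so eSuboptimalKHR can trigger a swapchain rebuild. The
  // semaphores, `queue` and the submission in between are assumptions.
  //
  //   vk::ResultValue<uint32_t> acquired =
  //     device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore, vk::Fence() );
  //   uint32_t imageIndex = acquired.value;
  //
  //   // ... submit rendering work that signals renderFinishedSemaphore ...
  //
  //   vk::PresentInfoKHR presentInfo;
  //   presentInfo.waitSemaphoreCount = 1;
  //   presentInfo.pWaitSemaphores    = &renderFinishedSemaphore;
  //   presentInfo.swapchainCount     = 1;
  //   presentInfo.pSwapchains        = &swapchain;
  //   presentInfo.pImageIndices      = &imageIndex;
  //
  //   vk::Result presentResult = queue.presentKHR( presentInfo );
  //   if ( ( acquired.result == vk::Result::eSuboptimalKHR ) || ( presentResult == vk::Result::eSuboptimalKHR ) )
  //   {
  //     // recreate the swapchain
  //   }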

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities,
    Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
      m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
    Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
    Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR(
      m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
    return createResultValue(
      result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR                       surface,
                                            VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
                                                static_cast<VkSurfaceKHR>( surface ),
                                                reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
    Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
    Result                                               result = static_cast<Result>(
      d.vkGetDeviceGroupSurfacePresentModesKHR( m_device,
                                                static_cast<VkSurfaceKHR>( surface ),
                                                reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
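
  // Usage sketch (illustrative only; assumes the default `vk` namespace alias and application-created
  // objects `device` and `surface`); queries the device-group presentation capabilities and modes:
  //
  //   vk::DeviceGroupPresentCapabilitiesKHR groupCaps = device.getGroupPresentCapabilitiesKHR();
  //   vk::DeviceGroupPresentModeFlagsKHR    modes     = device.getGroupSurfacePresentModesKHR( surface );
  //   if ( modes & vk::DeviceGroupPresentModeFlagBitsKHR::eLocal )
  //   {
  //     // local presentation is supported for this surface
  //   }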

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                             uint32_t *                       pRectCount,
                                             VULKAN_HPP_NAMESPACE::Rect2D *   pRects,
                                             Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
      m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Rect2DAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
                       PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    std::vector<Rect2D, Rect2DAllocator> rects;
    uint32_t                             rectCount;
    Result                               result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && rectCount )
      {
        rects.resize( rectCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice,
                                                     static_cast<VkSurfaceKHR>( surface ),
                                                     &rectCount,
                                                     reinterpret_cast<VkRect2D *>( rects.data() ) ) );
        VULKAN_HPP_ASSERT( rectCount <= rects.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
    {
      rects.resize( rectCount );
    }
    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
  }

  template <typename Rect2DAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type
                       PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                             Rect2DAllocator &                rect2DAllocator,
                                             Dispatch const &                 d ) const
  {
    std::vector<Rect2D, Rect2DAllocator> rects( rect2DAllocator );
    uint32_t                             rectCount;
    Result                               result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && rectCount )
      {
        rects.resize( rectCount );
        result = static_cast<Result>(
          d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice,
                                                     static_cast<VkSurfaceKHR>( surface ),
                                                     &rectCount,
                                                     reinterpret_cast<VkRect2D *>( rects.data() ) ) );
        VULKAN_HPP_ASSERT( rectCount <= rects.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
    {
      rects.resize( rectCount );
    }
    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
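
  // Usage sketch (illustrative only; assumes the default `vk` namespace alias and application-created
  // objects `physicalDevice` and `surface`):
  //
  //   std::vector<vk::Rect2D> rects = physicalDevice.getPresentRectanglesKHR( surface );
  //   for ( vk::Rect2D const & rect : rects )
  //   {
  //     // each rect is a region of the surface this physical device can present to
  //   }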

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
                                  uint32_t *                                            pImageIndex,
                                  Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkAcquireNextImage2KHR(
      m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t>
                                         Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const
  {
    uint32_t imageIndex;
    Result   result = static_cast<Result>( d.vkAcquireNextImage2KHR(
      m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
    return createResultValue( result,
                              imageIndex,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eTimeout,
                                VULKAN_HPP_NAMESPACE::Result::eNotReady,
                                VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
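
  // Usage sketch (illustrative only; assumes the default `vk` namespace alias and application-created
  // objects `device`, `swapchain`, `imageAvailableSemaphore`; a deviceMask of 0x1 selects the first
  // physical device in the device group):
  //
  //   vk::AcquireNextImageInfoKHR acquireInfo( swapchain, UINT64_MAX, imageAvailableSemaphore, {}, 0x1 );
  //   vk::ResultValue<uint32_t>   acquired   = device.acquireNextImage2KHR( acquireInfo );
  //   uint32_t                    imageIndex = acquired.value;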

  //=== VK_KHR_display ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getDisplayPropertiesKHR( uint32_t *                                   pPropertyCount,
                                             VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
                                             Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
      m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
  {
    std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
    uint32_t                                                         propertyCount;
    Result                                                           result;
    do
    {
      result =
        static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
  }

  template <typename DisplayPropertiesKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator,
                                             Dispatch const &                d ) const
  {
    std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
    uint32_t                                                         propertyCount;
    Result                                                           result;
    do
    {
      result =
        static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t *                                        pPropertyCount,
                                                  VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
      m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
  {
    std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
    uint32_t                                                                   propertyCount;
    Result                                                                     result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
  }

  template <typename DisplayPlanePropertiesKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPlanePropertiesKHR(
      DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
  {
    std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties(
      displayPlanePropertiesKHRAllocator );
    uint32_t propertyCount;
    Result   result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t                           planeIndex,
                                                         uint32_t *                         pDisplayCount,
                                                         VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
      m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
                       PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
  {
    std::vector<DisplayKHR, DisplayKHRAllocator> displays;
    uint32_t                                     displayCount;
    Result                                       result;
    do
    {
      result = static_cast<Result>(
        d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && displayCount )
      {
        displays.resize( displayCount );
        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
          m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
        VULKAN_HPP_ASSERT( displayCount <= displays.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
    {
      displays.resize( displayCount );
    }
    return createResultValue(
      result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
  }

  template <typename DisplayKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type
                       PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t              planeIndex,
                                                         DisplayKHRAllocator & displayKHRAllocator,
                                                         Dispatch const &      d ) const
  {
    std::vector<DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
    uint32_t                                     displayCount;
    Result                                       result;
    do
    {
      result = static_cast<Result>(
        d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && displayCount )
      {
        displays.resize( displayCount );
        result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR(
          m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
        VULKAN_HPP_ASSERT( displayCount <= displays.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
    {
      displays.resize( displayCount );
    }
    return createResultValue(
      result, displays, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR                 display,
                                                 uint32_t *                                       pPropertyCount,
                                                 VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
                                       static_cast<VkDisplayKHR>( display ),
                                       pPropertyCount,
                                       reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
    uint32_t                                                                 propertyCount;
    Result                                                                   result;
    do
    {
      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>(
          d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
                                           static_cast<VkDisplayKHR>( display ),
                                           &propertyCount,
                                           reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
  }

  template <typename DisplayModePropertiesKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                                                 DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
                                                 Dispatch const &                    d ) const
  {
    std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties(
      displayModePropertiesKHRAllocator );
    uint32_t propertyCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>(
          d.vkGetDisplayModePropertiesKHR( m_physicalDevice,
                                           static_cast<VkDisplayKHR>( display ),
                                           &propertyCount,
                                           reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR                       display,
                                          const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                          VULKAN_HPP_NAMESPACE::DisplayModeKHR *                 pMode,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDisplayModeKHR( m_physicalDevice,
                                static_cast<VkDisplayKHR>( display ),
                                reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
    PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                                          const DisplayModeCreateInfoKHR &    createInfo,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
    Result                               result = static_cast<Result>(
      d.vkCreateDisplayModeKHR( m_physicalDevice,
                                static_cast<VkDisplayKHR>( display ),
                                reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
    return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
    PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                                                const DisplayModeCreateInfoKHR &    createInfo,
                                                Optional<const AllocationCallbacks> allocator,
                                                Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
    Result                               result = static_cast<Result>(
      d.vkCreateDisplayModeKHR( m_physicalDevice,
                                static_cast<VkDisplayKHR>( display ),
                                reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
    ObjectDestroy<PhysicalDevice, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>(
      result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR                mode,
                                                    uint32_t                                            planeIndex,
                                                    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
                                          static_cast<VkDisplayModeKHR>( mode ),
                                          planeIndex,
                                          reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
    PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
                                                    uint32_t                             planeIndex,
                                                    Dispatch const &                     d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
    Result                                            result = static_cast<Result>(
      d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice,
                                          static_cast<VkDisplayModeKHR>( mode ),
                                          planeIndex,
                                          reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
    return createResultValue(
      result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                            VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
                                        reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                        reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
                                        reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo,
                                                  Optional<const AllocationCallbacks> allocator,
                                                  Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
                                        reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
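
  // Usage sketch for the VK_KHR_display entry points above (illustrative only; assumes the default `vk`
  // namespace alias, application-created `instance` / `physicalDevice`, and, for brevity, that display 0
  // and plane 0 exist; getDisplayPlaneSupportedDisplaysKHR / getDisplayPlaneCapabilitiesKHR can be used
  // to validate the plane choice):
  //
  //   std::vector<vk::DisplayPropertiesKHR>      displayProps = physicalDevice.getDisplayPropertiesKHR();
  //   std::vector<vk::DisplayPlanePropertiesKHR> planeProps   = physicalDevice.getDisplayPlanePropertiesKHR();
  //   vk::DisplayKHR                             display      = displayProps[0].display;
  //
  //   std::vector<vk::DisplayModePropertiesKHR> modeProps = physicalDevice.getDisplayModePropertiesKHR( display );
  //   vk::DisplayModeKHR                        mode      = physicalDevice.createDisplayModeKHR(
  //     display, vk::DisplayModeCreateInfoKHR( {}, modeProps[0].parameters ) );
  //
  //   vk::DisplaySurfaceCreateInfoKHR surfaceCreateInfo;
  //   surfaceCreateInfo.displayMode     = mode;
  //   surfaceCreateInfo.planeIndex      = 0;
  //   surfaceCreateInfo.planeStackIndex = planeProps[0].currentStackIndex;
  //   surfaceCreateInfo.transform       = vk::SurfaceTransformFlagBitsKHR::eIdentity;
  //   surfaceCreateInfo.globalAlpha     = 1.0f;
  //   surfaceCreateInfo.alphaMode       = vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
  //   surfaceCreateInfo.imageExtent     = modeProps[0].parameters.visibleRegion;
  //   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( surfaceCreateInfo );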

  //=== VK_KHR_display_swapchain ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createSharedSwapchainsKHR( uint32_t                                             swapchainCount,
                                       const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                       VULKAN_HPP_NAMESPACE::SwapchainKHR *                 pSwapchains,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateSharedSwapchainsKHR( m_device,
                                     swapchainCount,
                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                     reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SwapchainKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks>                                    allocator,
      Dispatch const &                                                       d ) const
  {
    std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
    Result                                           result = static_cast<Result>(
      d.vkCreateSharedSwapchainsKHR( m_device,
                                     createInfos.size(),
                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
  }

  template <typename SwapchainKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks>                                    allocator,
      SwapchainKHRAllocator &                                                swapchainKHRAllocator,
      Dispatch const &                                                       d ) const
  {
    std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
    Result                                           result = static_cast<Result>(
      d.vkCreateSharedSwapchainsKHR( m_device,
                                     createInfos.size(),
                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type
                                          Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
                                      Optional<const AllocationCallbacks>                  allocator,
                                      Dispatch const &                                     d ) const
  {
    SwapchainKHR swapchain;
    Result       result = static_cast<Result>(
      d.vkCreateSharedSwapchainsKHR( m_device,
                                     1,
                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch, typename SwapchainKHRAllocator>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks>                                    allocator,
      Dispatch const &                                                       d ) const
  {
    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
    std::vector<SwapchainKHR>                                                swapchains( createInfos.size() );
    Result                                                                   result = static_cast<Result>(
      d.vkCreateSharedSwapchainsKHR( m_device,
                                     createInfos.size(),
                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      uniqueSwapchains.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
      }
    }
    return createResultValue(
      result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
  }

  template <typename Dispatch,
            typename SwapchainKHRAllocator,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks>                                    allocator,
      SwapchainKHRAllocator &                                                swapchainKHRAllocator,
      Dispatch const &                                                       d ) const
  {
    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
    std::vector<SwapchainKHR>                                                swapchains( createInfos.size() );
    Result                                                                   result = static_cast<Result>(
      d.vkCreateSharedSwapchainsKHR( m_device,
                                     createInfos.size(),
                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      uniqueSwapchains.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
      }
    }
    return createResultValue(
      result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type
    Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
                                            Optional<const AllocationCallbacks>                  allocator,
                                            Dispatch const &                                     d ) const
  {
    SwapchainKHR swapchain;
    Result       result = static_cast<Result>(
      d.vkCreateSharedSwapchainsKHR( m_device,
                                     1,
                                     reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                     reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<SwapchainKHR, Dispatch>(
      result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
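
  // Usage sketch (illustrative only; assumes the default `vk` namespace alias, an application-created
  // `device`, and two already filled vk::SwapchainCreateInfoKHR structures `createInfo0` / `createInfo1`):
  //
  //   std::array<vk::SwapchainCreateInfoKHR, 2> createInfos = { createInfo0, createInfo1 };
  //   std::vector<vk::SwapchainKHR>             swapchains  = device.createSharedSwapchainsKHR( createInfos );
  //
  //   // or, for a single shared swapchain, the convenience overload:
  //   vk::SwapchainKHR swapchain = device.createSharedSwapchainKHR( createInfo0 );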

#if defined( VK_USE_PLATFORM_XLIB_KHR )
  //=== VK_KHR_xlib_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                    VULKAN_HPP_NAMESPACE::SurfaceKHR *                     pSurface,
                                    Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateXlibSurfaceKHR( m_instance,
                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR &    createInfo,
                                    Optional<const AllocationCallbacks> allocator,
                                    Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateXlibSurfaceKHR( m_instance,
                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR &    createInfo,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateXlibSurfaceKHR( m_instance,
                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t         queueFamilyIndex,
                                                                          Display *        dpy,
                                                                          VisualID         visualID,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Bool32>(
      d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t         queueFamilyIndex,
                                                                          Display &        dpy,
                                                                          VisualID         visualID,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_XLIB_KHR*/
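
  // Usage sketch (illustrative only; assumes the default `vk` namespace alias, application-created
  // `instance` / `physicalDevice`, an X11 `Display * dpy`, a `Window window`, a `VisualID visualID`,
  // and a `uint32_t queueFamilyIndex`):
  //
  //   if ( physicalDevice.getXlibPresentationSupportKHR( queueFamilyIndex, *dpy, visualID ) )
  //   {
  //     vk::XlibSurfaceCreateInfoKHR createInfo( {}, dpy, window );
  //     vk::SurfaceKHR               surface = instance.createXlibSurfaceKHR( createInfo );
  //   }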

#if defined( VK_USE_PLATFORM_XCB_KHR )
  //=== VK_KHR_xcb_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                   VULKAN_HPP_NAMESPACE::SurfaceKHR *                    pSurface,
                                   Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateXcbSurfaceKHR( m_instance,
                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                               reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR &     createInfo,
                                   Optional<const AllocationCallbacks> allocator,
                                   Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateXcbSurfaceKHR( m_instance,
                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR &     createInfo,
                                         Optional<const AllocationCallbacks> allocator,
                                         Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateXcbSurfaceKHR( m_instance,
                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t           queueFamilyIndex,
                                                                         xcb_connection_t * connection,
                                                                         xcb_visualid_t     visual_id,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Bool32>(
      d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t           queueFamilyIndex,
                                                                         xcb_connection_t & connection,
                                                                         xcb_visualid_t     visual_id,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_XCB_KHR*/
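
  // Usage sketch (illustrative only; assumes the default `vk` namespace alias, application-created
  // `instance` / `physicalDevice`, an `xcb_connection_t * connection`, an `xcb_window_t window`,
  // an `xcb_visualid_t visualId`, and a `uint32_t queueFamilyIndex`):
  //
  //   if ( physicalDevice.getXcbPresentationSupportKHR( queueFamilyIndex, *connection, visualId ) )
  //   {
  //     vk::XcbSurfaceCreateInfoKHR createInfo( {}, connection, window );
  //     vk::SurfaceKHR              surface = instance.createXcbSurfaceKHR( createInfo );
  //   }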

#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  //=== VK_KHR_wayland_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                       VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateWaylandSurfaceKHR( m_instance,
                                   reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                   reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo,
                                       Optional<const AllocationCallbacks> allocator,
                                       Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateWaylandSurfaceKHR( m_instance,
                                   reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo,
                                             Optional<const AllocationCallbacks> allocator,
                                             Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateWaylandSurfaceKHR( m_instance,
                                   reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
    uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Bool32>(
      d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR(
    uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WAYLAND_KHR*/
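
  // Editorial usage sketch, not part of the generated API (assumes the default `vk` namespace, an
  // application-provided `instance`, `physicalDevice`, `queueFamilyIndex`, and Wayland `display` /
  // `wlSurface` pointers, and exceptions enabled so the enhanced overloads return the handle directly):
  //
  //   vk::WaylandSurfaceCreateInfoKHR createInfo( {}, display, wlSurface );
  //   vk::SurfaceKHR       surface       = instance.createWaylandSurfaceKHR( createInfo );
  //   vk::UniqueSurfaceKHR uniqueSurface = instance.createWaylandSurfaceKHRUnique( createInfo );
  //
  //   // check that the chosen queue family can present to this Wayland display
  //   vk::Bool32 supported = physicalDevice.getWaylandPresentationSupportKHR( queueFamilyIndex, *display );
  //
  // The Android and Win32 sections below follow the same create / createUnique pattern.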

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  //=== VK_KHR_android_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR(
    const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
    VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
    Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateAndroidSurfaceKHR( m_instance,
                                   reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                   reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo,
                                       Optional<const AllocationCallbacks> allocator,
                                       Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateAndroidSurfaceKHR( m_instance,
                                   reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo,
                                             Optional<const AllocationCallbacks> allocator,
                                             Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateAndroidSurfaceKHR( m_instance,
                                   reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_ANDROID_KHR*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_win32_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR(
    const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
    VULKAN_HPP_NAMESPACE::SurfaceKHR *                      pSurface,
    Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateWin32SurfaceKHR( m_instance,
                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR &   createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateWin32SurfaceKHR( m_instance,
                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR &   createInfo,
                                           Optional<const AllocationCallbacks> allocator,
                                           Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateWin32SurfaceKHR( m_instance,
                                 reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR(
    uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Bool32>(
      d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
  }
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
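
  // Editorial note: unlike the Wayland query above, vkGetPhysicalDeviceWin32PresentationSupportKHR
  // takes no connection or window handle, so the wrapper needs only the queue family index, e.g.
  // (illustrative names):
  //
  //   vk::Bool32 canPresent = physicalDevice.getWin32PresentationSupportKHR( queueFamilyIndex );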

  //=== VK_EXT_debug_report ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT(
    const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT *                 pCallback,
    Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDebugReportCallbackEXT( m_instance,
                                        reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                        reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
    Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo,
                                            Optional<const AllocationCallbacks>      allocator,
                                            Dispatch const &                         d ) const
  {
    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
    Result                                       result = static_cast<Result>(
      d.vkCreateDebugReportCallbackEXT( m_instance,
                                        reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
    return createResultValue(
      result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
    Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo,
                                                  Optional<const AllocationCallbacks>      allocator,
                                                  Dispatch const &                         d ) const
  {
    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
    Result                                       result = static_cast<Result>(
      d.vkCreateDebugReportCallbackEXT( m_instance,
                                        reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>(
      result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
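
  // Editorial usage sketch, not part of the generated API (assumes VK_EXT_debug_report was enabled
  // at instance creation, a dispatcher with the extension entry points loaded, e.g. a
  // DispatchLoaderDynamic, and an application-defined PFN_vkDebugReportCallbackEXT `myDebugCallback`):
  //
  //   vk::DebugReportCallbackCreateInfoEXT createInfo(
  //     vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, &myDebugCallback );
  //   vk::UniqueDebugReportCallbackEXT callback = instance.createDebugReportCallbackEXTUnique( createInfo );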

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT      callback,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugReportCallbackEXT( m_instance,
                                       static_cast<VkDebugReportCallbackEXT>( callback ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                                                  Optional<const AllocationCallbacks> allocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugReportCallbackEXT(
      m_instance,
      static_cast<VkDebugReportCallbackEXT>( callback ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT      callback,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugReportCallbackEXT( m_instance,
                                       static_cast<VkDebugReportCallbackEXT>( callback ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                            Optional<const AllocationCallbacks>          allocator,
                                            Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugReportCallbackEXT(
      m_instance,
      static_cast<VkDebugReportCallbackEXT>( callback ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
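
  // Editorial note: the generic Instance::destroy overloads above mirror destroyDebugReportCallbackEXT;
  // providing a uniform destroy( handle, allocator, dispatch ) entry point is what lets ObjectDestroy,
  // and therefore the UniqueHandle returned by createDebugReportCallbackEXTUnique, release the callback.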

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT      flags,
                                                          VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
                                                          uint64_t                                       object,
                                                          size_t                                         location,
                                                          int32_t                                        messageCode,
                                                          const char *                                   pLayerPrefix,
                                                          const char *                                   pMessage,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDebugReportMessageEXT( m_instance,
                               static_cast<VkDebugReportFlagsEXT>( flags ),
                               static_cast<VkDebugReportObjectTypeEXT>( objectType ),
                               object,
                               location,
                               messageCode,
                               pLayerPrefix,
                               pMessage );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT      flags,
                                                          VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
                                                          uint64_t                                       object,
                                                          size_t                                         location,
                                                          int32_t                                        messageCode,
                                                          const std::string &                            layerPrefix,
                                                          const std::string &                            message,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDebugReportMessageEXT( m_instance,
                               static_cast<VkDebugReportFlagsEXT>( flags ),
                               static_cast<VkDebugReportObjectTypeEXT>( objectType ),
                               object,
                               location,
                               messageCode,
                               layerPrefix.c_str(),
                               message.c_str() );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
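
  // Editorial usage sketch: injecting a custom message into installed debug report callbacks via the
  // std::string overload above (all values illustrative):
  //
  //   instance.debugReportMessageEXT( vk::DebugReportFlagBitsEXT::eInformation,
  //                                   vk::DebugReportObjectTypeEXT::eUnknown,
  //                                   0 /*object*/, 0 /*location*/, 0 /*messageCode*/,
  //                                   "app", "frame capture started" );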

  //=== VK_EXT_debug_marker ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT(
    const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT(
    const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
      m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT(
      m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
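
  // Editorial usage sketch: bracketing a group of commands with a named, colored region for graphics
  // debuggers (assumes VK_EXT_debug_marker is enabled on the device and `commandBuffer` is recording):
  //
  //   vk::DebugMarkerMarkerInfoEXT marker( "shadow pass", { { 1.0f, 0.5f, 0.0f, 1.0f } } );
  //   commandBuffer.debugMarkerBeginEXT( marker );
  //   // ... record draw calls ...
  //   commandBuffer.debugMarkerEndEXT();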

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_queue ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR(
    const VULKAN_HPP_NAMESPACE::VideoProfileKHR * pVideoProfile,
    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR *  pCapabilities,
    Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
                                                 reinterpret_cast<const VkVideoProfileKHR *>( pVideoProfile ),
                                                 reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
    PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
    Result                                     result = static_cast<Result>(
      d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
                                                 reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
                                                 reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
    return createResultValue(
      result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
                                          PhysicalDevice::getVideoCapabilitiesKHR( const VideoProfileKHR & videoProfile, Dispatch const & d ) const
  {
    StructureChain<X, Y, Z...>                   structureChain;
    VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities =
      structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
    Result result = static_cast<Result>(
      d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice,
                                                 reinterpret_cast<const VkVideoProfileKHR *>( &videoProfile ),
                                                 reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ) );
    return createResultValue(
      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
    uint32_t *                                                     pVideoFormatPropertyCount,
    VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR *               pVideoFormatProperties,
    Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
      pVideoFormatPropertyCount,
      reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
    PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
                                                 Dispatch const &                         d ) const
  {
    std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
    uint32_t                                                                 videoFormatPropertyCount;
    Result                                                                   result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
        m_physicalDevice,
        reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
        &videoFormatPropertyCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
      {
        videoFormatProperties.resize( videoFormatPropertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
          m_physicalDevice,
          reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
          &videoFormatPropertyCount,
          reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
        VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) )
    {
      videoFormatProperties.resize( videoFormatPropertyCount );
    }
    return createResultValue(
      result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
  }

  template <typename VideoFormatPropertiesKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, VideoFormatPropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
    PhysicalDevice::getVideoFormatPropertiesKHR( const PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
                                                 VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
                                                 Dispatch const &                    d ) const
  {
    std::vector<VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties(
      videoFormatPropertiesKHRAllocator );
    uint32_t videoFormatPropertyCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
        m_physicalDevice,
        reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
        &videoFormatPropertyCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && videoFormatPropertyCount )
      {
        videoFormatProperties.resize( videoFormatPropertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
          m_physicalDevice,
          reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
          &videoFormatPropertyCount,
          reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ) );
        VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( videoFormatPropertyCount < videoFormatProperties.size() ) )
    {
      videoFormatProperties.resize( videoFormatPropertyCount );
    }
    return createResultValue(
      result, videoFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
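
  // Editorial note on the enhanced-mode overloads above: they implement the usual Vulkan two-call
  // enumeration idiom. The first call passes nullptr to query the element count, the vector is
  // resized, the second call fills it, and the loop repeats while the driver returns
  // Result::eIncomplete (the count can change between calls). A minimal call site, assuming
  // exceptions are enabled:
  //
  //   std::vector<vk::VideoFormatPropertiesKHR> formats =
  //     physicalDevice.getVideoFormatPropertiesKHR( videoFormatInfo );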

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR(
    const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
    VULKAN_HPP_NAMESPACE::VideoSessionKHR *                 pVideoSession,
    Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateVideoSessionKHR( m_device,
                                 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                 reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
    Device::createVideoSessionKHR( const VideoSessionCreateInfoKHR &   createInfo,
                                   Optional<const AllocationCallbacks> allocator,
                                   Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
    Result                                result = static_cast<Result>(
      d.vkCreateVideoSessionKHR( m_device,
                                 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) );
    return createResultValue( result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
    Device::createVideoSessionKHRUnique( const VideoSessionCreateInfoKHR &   createInfo,
                                         Optional<const AllocationCallbacks> allocator,
                                         Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
    Result                                result = static_cast<Result>(
      d.vkCreateVideoSessionKHR( m_device,
                                 reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>(
      result, videoSession, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR             videoSession,
                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionKHR( m_device,
                                static_cast<VkVideoSessionKHR>( videoSession ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                                         Optional<const AllocationCallbacks>   allocator,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionKHR( m_device,
                                static_cast<VkVideoSessionKHR>( videoSession ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR             videoSession,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionKHR( m_device,
                                static_cast<VkVideoSessionKHR>( videoSession ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                          Optional<const AllocationCallbacks>   allocator,
                                          Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionKHR( m_device,
                                static_cast<VkVideoSessionKHR>( videoSession ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR(
    VULKAN_HPP_NAMESPACE::VideoSessionKHR               videoSession,
    uint32_t *                                          pVideoSessionMemoryRequirementsCount,
    VULKAN_HPP_NAMESPACE::VideoGetMemoryPropertiesKHR * pVideoSessionMemoryRequirements,
    Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
      m_device,
      static_cast<VkVideoSessionKHR>( videoSession ),
      pVideoSessionMemoryRequirementsCount,
      reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( pVideoSessionMemoryRequirements ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename VideoGetMemoryPropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
    Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                                  Dispatch const &                      d ) const
  {
    std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements;
    uint32_t                                                                       videoSessionMemoryRequirementsCount;
    Result                                                                         result;
    do
    {
      result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
        m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
      {
        videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
        result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
          m_device,
          static_cast<VkVideoSessionKHR>( videoSession ),
          &videoSessionMemoryRequirementsCount,
          reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
        VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) &&
         ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
    {
      videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
    }
    return createResultValue( result,
                              videoSessionMemoryRequirements,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
  }

  template <
    typename VideoGetMemoryPropertiesKHRAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, VideoGetMemoryPropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator>>::type
    Device::getVideoSessionMemoryRequirementsKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionKHR  videoSession,
      VideoGetMemoryPropertiesKHRAllocator & videoGetMemoryPropertiesKHRAllocator,
      Dispatch const &                       d ) const
  {
    std::vector<VideoGetMemoryPropertiesKHR, VideoGetMemoryPropertiesKHRAllocator> videoSessionMemoryRequirements(
      videoGetMemoryPropertiesKHRAllocator );
    uint32_t videoSessionMemoryRequirementsCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
        m_device, static_cast<VkVideoSessionKHR>( videoSession ), &videoSessionMemoryRequirementsCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && videoSessionMemoryRequirementsCount )
      {
        videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
        result = static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR(
          m_device,
          static_cast<VkVideoSessionKHR>( videoSession ),
          &videoSessionMemoryRequirementsCount,
          reinterpret_cast<VkVideoGetMemoryPropertiesKHR *>( videoSessionMemoryRequirements.data() ) ) );
        VULKAN_HPP_ASSERT( videoSessionMemoryRequirementsCount <= videoSessionMemoryRequirements.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) &&
         ( videoSessionMemoryRequirementsCount < videoSessionMemoryRequirements.size() ) )
    {
      videoSessionMemoryRequirements.resize( videoSessionMemoryRequirementsCount );
    }
    return createResultValue( result,
                              videoSessionMemoryRequirements,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindVideoSessionMemoryKHR(
    VULKAN_HPP_NAMESPACE::VideoSessionKHR            videoSession,
    uint32_t                                         videoSessionBindMemoryCount,
    const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR * pVideoSessionBindMemories,
    Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkBindVideoSessionMemoryKHR( m_device,
                                     static_cast<VkVideoSessionKHR>( videoSession ),
                                     videoSessionBindMemoryCount,
                                     reinterpret_cast<const VkVideoBindMemoryKHR *>( pVideoSessionBindMemories ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::bindVideoSessionMemoryKHR(
      VULKAN_HPP_NAMESPACE::VideoSessionKHR                              videoSession,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::VideoBindMemoryKHR> const & videoSessionBindMemories,
      Dispatch const &                                                   d ) const
  {
    Result result = static_cast<Result>( d.vkBindVideoSessionMemoryKHR(
      m_device,
      static_cast<VkVideoSessionKHR>( videoSession ),
      videoSessionBindMemories.size(),
      reinterpret_cast<const VkVideoBindMemoryKHR *>( videoSessionBindMemories.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
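
  // Editorial sketch of the typical flow through the functions above (beta extension; names are
  // illustrative and exceptions are assumed enabled):
  //
  //   vk::VideoSessionKHR session = device.createVideoSessionKHR( sessionCreateInfo );
  //   std::vector<vk::VideoGetMemoryPropertiesKHR> requirements =
  //     device.getVideoSessionMemoryRequirementsKHR( session );
  //   // allocate vk::DeviceMemory for each requirement, then bind everything in one call:
  //   device.bindVideoSessionMemoryKHR( session, bindMemoryInfos );  // ArrayProxy of VideoBindMemoryKHR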

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR(
    const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                 pAllocator,
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR *                 pVideoSessionParameters,
    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
      m_device,
      reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
      reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
    Device::createVideoSessionParametersKHR( const VideoSessionParametersCreateInfoKHR & createInfo,
                                             Optional<const AllocationCallbacks>         allocator,
                                             Dispatch const &                            d ) const
  {
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
    Result                                          result = static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
      m_device,
      reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) );
    return createResultValue(
      result, videoSessionParameters, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
    Device::createVideoSessionParametersKHRUnique( const VideoSessionParametersCreateInfoKHR & createInfo,
                                                   Optional<const AllocationCallbacks>         allocator,
                                                   Dispatch const &                            d ) const
  {
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
    Result                                          result = static_cast<Result>( d.vkCreateVideoSessionParametersKHR(
      m_device,
      reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ) );
    ObjectDestroy<Device, Dispatch>                 deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>(
      result,
      videoSessionParameters,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique",
      deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR(
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR                   videoSessionParameters,
    const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                             const VideoSessionParametersUpdateInfoKHR &     updateInfo,
                                             Dispatch const &                                d ) const
  {
    Result result = static_cast<Result>( d.vkUpdateVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR   videoSessionParameters,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionParametersKHR( m_device,
                                          static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                              Optional<const AllocationCallbacks>             allocator,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR   videoSessionParameters,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionParametersKHR( m_device,
                                          static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                          Optional<const AllocationCallbacks>             allocator,
                                          Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VideoBeginCodingInfoKHR & beginInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
                                      Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VideoEndCodingInfoKHR & endCodingInfo,
                                                           Dispatch const &              d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdControlVideoCodingKHR( m_commandBuffer,
                                  reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VideoCodingControlInfoKHR & codingControlInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdControlVideoCodingKHR( m_commandBuffer,
                                  reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_decode_queue ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pFrameInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VideoDecodeInfoKHR & frameInfo,
                                                        Dispatch const &           d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &frameInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_EXT_transform_feedback ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t                                 firstBinding,
                                                    uint32_t                                 bindingCount,
                                                    const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                                                    const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                    const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
                                            firstBinding,
                                            bindingCount,
                                            reinterpret_cast<const VkBuffer *>( pBuffers ),
                                            reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                                            reinterpret_cast<const VkDeviceSize *>( pSizes ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t                                               firstBinding,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
#  else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
                                            firstBinding,
                                            buffers.size(),
                                            reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                            reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                            reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
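
  // Editorial usage sketch for the ArrayProxy overload above: offsets must match buffers
  // element-for-element, and sizes may be left empty (a null pSizes is then passed to the C API) or
  // match as well; otherwise the wrapper asserts or throws LogicError as shown. Illustrative names:
  //
  //   std::array<vk::Buffer, 2>     xfbBuffers = { bufferA, bufferB };
  //   std::array<vk::DeviceSize, 2> offsets    = { 0, 0 };
  //   commandBuffer.bindTransformFeedbackBuffersEXT( 0, xfbBuffers, offsets, {} );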

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginTransformFeedbackEXT( uint32_t                                 firstCounterBuffer,
                                              uint32_t                                 counterBufferCount,
                                              const VULKAN_HPP_NAMESPACE::Buffer *     pCounterBuffers,
                                              const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                                              Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
                                      firstCounterBuffer,
                                      counterBufferCount,
                                      reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
                                      reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT(
    uint32_t                                                   firstCounterBuffer,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     counterBuffers,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
    Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
#  else
    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
                                      firstCounterBuffer,
                                      counterBuffers.size(),
                                      reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
                                      reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::endTransformFeedbackEXT( uint32_t                                 firstCounterBuffer,
                                            uint32_t                                 counterBufferCount,
                                            const VULKAN_HPP_NAMESPACE::Buffer *     pCounterBuffers,
                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                                            Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
                                    firstCounterBuffer,
                                    counterBufferCount,
                                    reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
                                    reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT(
    uint32_t                                                   firstCounterBuffer,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     counterBuffers,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
    Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
#  else
    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
                                    firstCounterBuffer,
                                    counterBuffers.size(),
                                    reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
                                    reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool         queryPool,
                                                              uint32_t                                query,
                                                              VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                                                              uint32_t                                index,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginQueryIndexedEXT(
      m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                            uint32_t                        query,
                                                            uint32_t                        index,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t                         instanceCount,
                                                                  uint32_t                         firstInstance,
                                                                  VULKAN_HPP_NAMESPACE::Buffer     counterBuffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
                                                                  uint32_t                         counterOffset,
                                                                  uint32_t                         vertexStride,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
                                     instanceCount,
                                     firstInstance,
                                     static_cast<VkBuffer>( counterBuffer ),
                                     static_cast<VkDeviceSize>( counterBufferOffset ),
                                     counterOffset,
                                     vertexStride );
  }
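
  // Usage sketch (illustrative only, not generated code): the commands of VK_EXT_transform_feedback
  // are typically used together as below. "commandBuffer", "counterBuffer", "vertexCount" and
  // "vertexStride" are assumptions of the example; pipeline and vertex-buffer state are expected to
  // be bound already.
  //
  //   vk::DeviceSize counterBufferOffset = 0;
  //   commandBuffer.beginTransformFeedbackEXT( 0, nullptr, nullptr );  // no counter buffers
  //   commandBuffer.draw( vertexCount, 1, 0, 0 );                      // vertices are captured
  //   commandBuffer.endTransformFeedbackEXT( 0, counterBuffer, counterBufferOffset );
  //
  //   // later: replay the captured vertices without reading the count back to the host
  //   commandBuffer.drawIndirectByteCountEXT( 1, 0, counterBuffer, 0, 0, vertexStride );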

  //=== VK_NVX_binary_import ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                               VULKAN_HPP_NAMESPACE::CuModuleNVX *                 pModule,
                               Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
                                                       reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
    Device::createCuModuleNVX( const CuModuleCreateInfoNVX &       createInfo,
                               Optional<const AllocationCallbacks> allocator,
                               Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
    Result                            result = static_cast<Result>(
      d.vkCreateCuModuleNVX( m_device,
                             reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
    return createResultValue( result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
    Device::createCuModuleNVXUnique( const CuModuleCreateInfoNVX &       createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
    Result                            result = static_cast<Result>(
      d.vkCreateCuModuleNVX( m_device,
                             reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCuModuleNVX *>( &module ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>(
      result, module, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                 VULKAN_HPP_NAMESPACE::CuFunctionNVX *                 pFunction,
                                 Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateCuFunctionNVX( m_device,
                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                               reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
    Device::createCuFunctionNVX( const CuFunctionCreateInfoNVX &     createInfo,
                                 Optional<const AllocationCallbacks> allocator,
                                 Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
    Result                              result = static_cast<Result>(
      d.vkCreateCuFunctionNVX( m_device,
                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
    return createResultValue( result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
    Device::createCuFunctionNVXUnique( const CuFunctionCreateInfoNVX &     createInfo,
                                       Optional<const AllocationCallbacks> allocator,
                                       Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
    Result                              result = static_cast<Result>(
      d.vkCreateCuFunctionNVX( m_device,
                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkCuFunctionNVX *>( &function ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>(
      result, function, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
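
  // Usage sketch (illustrative only, not generated code) for creating the VK_NVX_binary_import
  // handles, assuming exceptions are enabled (the default), the "vk" namespace, a "device", a
  // compiled binary in "cubinData" ( e.g. std::vector<char> ), and a kernel entry point named
  // "myKernel". The struct constructors are assumed to take their members in declaration order
  // ( dataSize / pData and module / pName ).
  //
  //   vk::CuModuleNVX cuModule =
  //     device.createCuModuleNVX( vk::CuModuleCreateInfoNVX( cubinData.size(), cubinData.data() ) );
  //   vk::CuFunctionNVX cuFunction =
  //     device.createCuFunctionNVX( vk::CuFunctionCreateInfoNVX( cuModule, "myKernel" ) );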

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX                 module,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuModuleNVX(
      m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX   module,
                                                     Optional<const AllocationCallbacks> allocator,
                                                     Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuModuleNVX( m_device,
                            static_cast<VkCuModuleNVX>( module ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX                 module,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuModuleNVX(
      m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX   module,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuModuleNVX( m_device,
                            static_cast<VkCuModuleNVX>( module ),
                            reinterpret_cast<const VkAllocationCallbacks *>(
                              static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX               function,
                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                                       Optional<const AllocationCallbacks> allocator,
                                                       Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX               function,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                          Optional<const AllocationCallbacks> allocator,
                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>(
                                static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const CuLaunchInfoNVX & launchInfo,
                                                           Dispatch const &        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
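
  // Usage sketch (illustrative only, not generated code) for recording a kernel launch with a
  // previously created function; "commandBuffer" and "cuFunction" are assumptions, and the grid and
  // block dimensions are placeholders chosen for the example.
  //
  //   vk::CuLaunchInfoNVX launchInfo;
  //   launchInfo.function  = cuFunction;
  //   launchInfo.gridDimX  = 64;
  //   launchInfo.gridDimY  = 1;
  //   launchInfo.gridDimZ  = 1;
  //   launchInfo.blockDimX = 256;
  //   launchInfo.blockDimY = 1;
  //   launchInfo.blockDimZ = 1;
  //   commandBuffer.cuLaunchKernelNVX( launchInfo );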

  //=== VK_NVX_image_view_handle ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info,
                                                            Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView                       imageView,
                                    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
                                    Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetImageViewAddressNVX( m_device,
                                  static_cast<VkImageView>( imageView ),
                                  reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
    Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
    Result                                              result = static_cast<Result>(
      d.vkGetImageViewAddressNVX( m_device,
                                  static_cast<VkImageView>( imageView ),
                                  reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) );
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
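
  // Usage sketch (illustrative only, not generated code) for VK_NVX_image_view_handle, assuming the
  // "vk" namespace, a "device", an "imageView" and a "sampler", exceptions enabled so the enhanced
  // overloads return their results directly, and the generated ImageViewHandleInfoNVX constructor
  // taking ( imageView, descriptorType, sampler ).
  //
  //   vk::ImageViewHandleInfoNVX handleInfo( imageView, vk::DescriptorType::eCombinedImageSampler, sampler );
  //   uint32_t viewHandle = device.getImageViewHandleNVX( handleInfo );
  //
  //   vk::ImageViewAddressPropertiesNVX addressProperties = device.getImageViewAddressNVX( imageView );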

  //=== VK_AMD_draw_indirect_count ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                              VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                              uint32_t                         maxDrawCount,
                                                              uint32_t                         stride,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
                                 static_cast<VkBuffer>( buffer ),
                                 static_cast<VkDeviceSize>( offset ),
                                 static_cast<VkBuffer>( countBuffer ),
                                 static_cast<VkDeviceSize>( countBufferOffset ),
                                 maxDrawCount,
                                 stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                     VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                     uint32_t                         maxDrawCount,
                                                                     uint32_t                         stride,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
                                        static_cast<VkBuffer>( buffer ),
                                        static_cast<VkDeviceSize>( offset ),
                                        static_cast<VkBuffer>( countBuffer ),
                                        static_cast<VkDeviceSize>( countBufferOffset ),
                                        maxDrawCount,
                                        stride );
  }
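
  // Usage sketch (illustrative only, not generated code) for a count-buffer driven draw, assuming
  // "commandBuffer", an "indirectBuffer" filled with VkDrawIndirectCommand records, a "countBuffer"
  // whose first uint32_t holds the draw count, and an upper bound "maxDrawCount".
  //
  //   commandBuffer.drawIndirectCountAMD(
  //     indirectBuffer, 0, countBuffer, 0, maxDrawCount, sizeof( VkDrawIndirectCommand ) );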

  //=== VK_AMD_shader_info ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
                              size_t *                                  pInfoSize,
                              void *                                    pInfo,
                              Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                      static_cast<VkPipeline>( pipeline ),
                                                      static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                      static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                      pInfoSize,
                                                      pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
                              Dispatch const &                          d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> info;
    size_t                                 infoSize;
    Result                                 result;
    do
    {
      result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                          static_cast<VkPipeline>( pipeline ),
                                                          static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                          static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                          &infoSize,
                                                          nullptr ) );
      if ( ( result == Result::eSuccess ) && infoSize )
      {
        info.resize( infoSize );
        result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                            static_cast<VkPipeline>( pipeline ),
                                                            static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                            static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                            &infoSize,
                                                            reinterpret_cast<void *>( info.data() ) ) );
        VULKAN_HPP_ASSERT( infoSize <= info.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
    {
      info.resize( infoSize );
    }
    return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
  }

  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
                              Uint8_tAllocator &                        uint8_tAllocator,
                              Dispatch const &                          d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
    size_t                                 infoSize;
    Result                                 result;
    do
    {
      result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                          static_cast<VkPipeline>( pipeline ),
                                                          static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                          static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                          &infoSize,
                                                          nullptr ) );
      if ( ( result == Result::eSuccess ) && infoSize )
      {
        info.resize( infoSize );
        result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                            static_cast<VkPipeline>( pipeline ),
                                                            static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                            static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                            &infoSize,
                                                            reinterpret_cast<void *>( info.data() ) ) );
        VULKAN_HPP_ASSERT( infoSize <= info.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
    {
      info.resize( infoSize );
    }
    return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
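
  // Usage note (illustrative only, not generated code): the vector overloads above follow the usual
  // two-call pattern, querying the size with a null pointer and then filling the buffer, repeating
  // while the call returns VK_INCOMPLETE. With exceptions enabled the returned bytes can be used
  // directly, e.g. to dump the disassembly of a pipeline's vertex stage ("device" and "pipeline" are
  // assumptions of the sketch):
  //
  //   std::vector<uint8_t> disassembly = device.getShaderInfoAMD(
  //     pipeline, vk::ShaderStageFlagBits::eVertex, vk::ShaderInfoTypeAMD::eDisassembly );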

#if defined( VK_USE_PLATFORM_GGP )
  //=== VK_GGP_stream_descriptor_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP(
    const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                  pAllocator,
    VULKAN_HPP_NAMESPACE::SurfaceKHR *                                 pSurface,
    Dispatch const &                                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
      reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                                Optional<const AllocationCallbacks>          allocator,
                                                Dispatch const &                             d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                                      Optional<const AllocationCallbacks>          allocator,
                                                      Dispatch const &                             d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR  surface;
    Result                            result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_GGP*/

  //=== VK_NV_external_memory_capabilities ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV(
    VULKAN_HPP_NAMESPACE::Format                            format,
    VULKAN_HPP_NAMESPACE::ImageType                         type,
    VULKAN_HPP_NAMESPACE::ImageTiling                       tiling,
    VULKAN_HPP_NAMESPACE::ImageUsageFlags                   usage,
    VULKAN_HPP_NAMESPACE::ImageCreateFlags                  flags,
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV   externalHandleType,
    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
    Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkImageTiling>( tiling ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageCreateFlags>( flags ),
      static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
      reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
    PhysicalDevice::getExternalImageFormatPropertiesNV(
      VULKAN_HPP_NAMESPACE::Format                          format,
      VULKAN_HPP_NAMESPACE::ImageType                       type,
      VULKAN_HPP_NAMESPACE::ImageTiling                     tiling,
      VULKAN_HPP_NAMESPACE::ImageUsageFlags                 usage,
      VULKAN_HPP_NAMESPACE::ImageCreateFlags                flags,
      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
      Dispatch const &                                      d ) const
  {
    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
      m_physicalDevice,
      static_cast<VkFormat>( format ),
      static_cast<VkImageType>( type ),
      static_cast<VkImageTiling>( tiling ),
      static_cast<VkImageUsageFlags>( usage ),
      static_cast<VkImageCreateFlags>( flags ),
      static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
      reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
    return createResultValue( result,
                              externalImageFormatProperties,
                              VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
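
  // Usage sketch (illustrative only, not generated code): query whether a 2D sampled image of a given
  // format can be exported with an NV external-memory handle type. "physicalDevice" is an assumption,
  // and with exceptions enabled an unsupported combination throws instead of returning an error code.
  //
  //   vk::ExternalImageFormatPropertiesNV externalProperties =
  //     physicalDevice.getExternalImageFormatPropertiesNV( vk::Format::eR8G8B8A8Unorm,
  //                                                        vk::ImageType::e2D,
  //                                                        vk::ImageTiling::eOptimal,
  //                                                        vk::ImageUsageFlagBits::eSampled,
  //                                                        {},
  //                                                        vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 );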

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_NV_external_memory_win32 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory                    memory,
                                    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
                                    HANDLE *                                              pHandle,
                                    Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetMemoryWin32HandleNV( m_device,
                                  static_cast<VkDeviceMemory>( memory ),
                                  static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
                                  pHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
    Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory                    memory,
                                    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
                                    Dispatch const &                                      d ) const
  {
    HANDLE handle;
    Result result =
      static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device,
                                                       static_cast<VkDeviceMemory>( memory ),
                                                       static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ),
                                                       &handle ) );
    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_KHR_get_physical_device_properties2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
                                         PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return features;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                      structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
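
  // Usage note (illustrative only, not generated code): the StructureChain overload above links the
  // pNext members of the requested structures before the call, so extension feature structures can be
  // filled in one query. Sketch, assuming the default "vk" namespace and a "physicalDevice" for which
  // VK_KHR_get_physical_device_properties2 (or Vulkan 1.1) is available:
  //
  //   auto chain = physicalDevice.getFeatures2KHR<vk::PhysicalDeviceFeatures2,
  //                                               vk::PhysicalDevice16BitStorageFeatures>();
  //   bool has16BitStorage =
  //     chain.get<vk::PhysicalDevice16BitStorageFeatures>().storageBuffer16BitAccess;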

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
                                       Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
                                         reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
                                         PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
                                         reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return properties;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                        structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice,
                                         reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format              format,
                                             VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
                                             Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFormatProperties2KHR(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
    PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
                                             Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
    d.vkGetPhysicalDeviceFormatProperties2KHR(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return formatProperties;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
                                             Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                structureChain;
    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
    d.vkGetPhysicalDeviceFormatProperties2KHR(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *               pImageFormatProperties,
    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
      reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
    PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                                  Dispatch const &                       d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue(
      result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo,
                                                  Dispatch const &                       d ) const
  {
    StructureChain<X, Y, Z...>                     structureChain;
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue(
      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
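
  // Usage sketch (illustrative only, not generated code): check the limits for a 2D sampled image,
  // assuming "physicalDevice" and exceptions enabled (an unsupported format/usage combination then
  // throws instead of returning VK_ERROR_FORMAT_NOT_SUPPORTED).
  //
  //   vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo(
  //     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled );
  //   vk::ImageFormatProperties2 imageFormatProperties =
  //     physicalDevice.getImageFormatProperties2KHR( imageFormatInfo );
  //   vk::Extent3D maxExtent = imageFormatProperties.imageFormatProperties.maxExtent;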

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
                                                  VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice,
      pQueueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
                                         PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
  {
    std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
    uint32_t                                                             queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    return queueFamilyProperties;
  }

  template <typename QueueFamilyProperties2Allocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator,
                                                  Dispatch const &                  d ) const
  {
    std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties(
      queueFamilyProperties2Allocator );
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    return queueFamilyProperties;
  }

  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
                                         PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
  {
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    std::vector<StructureChain, StructureChainAllocator>      returnVector( queueFamilyPropertyCount );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext =
        returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return returnVector;
  }

  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator,
                                                  Dispatch const &          d ) const
  {
    uint32_t queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    std::vector<StructureChain, StructureChainAllocator>      returnVector( queueFamilyPropertyCount,
                                                                       structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext =
        returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
      m_physicalDevice,
      &queueFamilyPropertyCount,
      reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return returnVector;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
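
  // Usage note (illustrative only, not generated code): the overloads above use the two-call
  // enumeration pattern, and the StructureChain flavours additionally wire each element's pNext chain
  // before the second call. A minimal sketch with the plain vector overload ("physicalDevice" is an
  // assumption of the example):
  //
  //   std::vector<vk::QueueFamilyProperties2> queueFamilyProperties =
  //     physicalDevice.getQueueFamilyProperties2KHR();
  //   for ( auto const & qfp : queueFamilyProperties )
  //   {
  //     if ( qfp.queueFamilyProperties.queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       // found a graphics-capable queue family
  //     }
  //   }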

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMemoryProperties2KHR(
      m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
                                         PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties2KHR(
      m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return memoryProperties;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                              structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
    d.vkGetPhysicalDeviceMemoryProperties2KHR(
      m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
    uint32_t *                                                         pPropertyCount,
    VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 *               pProperties,
    Dispatch const &                                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
      pPropertyCount,
      reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                        Dispatch const &                             d ) const
  {
    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
    uint32_t                                                                         propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    return properties;
  }

  template <
    typename SparseImageFormatProperties2Allocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
                      PhysicalDevice::getSparseImageFormatProperties2KHR(
      const PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
      SparseImageFormatProperties2Allocator &      sparseImageFormatProperties2Allocator,
      Dispatch const &                             d ) const
  {
    std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties(
      sparseImageFormatProperties2Allocator );
    uint32_t propertyCount;
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
      &propertyCount,
      reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    return properties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
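
  // The vector-returning overloads above use the two-call query pattern seen throughout this header:
  // a first call with a null properties pointer yields the element count, the vector is resized, and a
  // second call fills it. As vkGetPhysicalDeviceSparseImageFormatProperties2KHR returns void, no
  // VK_INCOMPLETE retry loop is required here.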

  //=== VK_KHR_device_group ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getGroupPeerMemoryFeaturesKHR( uint32_t                                       heapIndex,
                                           uint32_t                                       localDeviceIndex,
                                           uint32_t                                       remoteDeviceIndex,
                                           VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                           Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device,
                                             heapIndex,
                                             localDeviceIndex,
                                             remoteDeviceIndex,
                                             reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
                                         Device::getGroupPeerMemoryFeaturesKHR( uint32_t         heapIndex,
                                           uint32_t         localDeviceIndex,
                                           uint32_t         remoteDeviceIndex,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
    d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device,
                                             heapIndex,
                                             localDeviceIndex,
                                             remoteDeviceIndex,
                                             reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
    return peerMemoryFeatures;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t         deviceMask,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t         baseGroupX,
                                                         uint32_t         baseGroupY,
                                                         uint32_t         baseGroupZ,
                                                         uint32_t         groupCountX,
                                                         uint32_t         groupCountY,
                                                         uint32_t         groupCountZ,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDispatchBaseKHR(
      m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  }
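
  // Usage sketch (not part of the generated API), assuming a recording vk::CommandBuffer cmd on a
  // device-group logical device and suitable group counts: setDeviceMaskKHR selects which physical
  // devices execute subsequent commands, and dispatchBaseKHR dispatches compute work with a base
  // workgroup offset.
  //
  //   cmd.setDeviceMaskKHR( 0x1 );
  //   cmd.dispatchBaseKHR( 0, 0, 0, groupCountX, groupCountY, groupCountZ );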

#if defined( VK_USE_PLATFORM_VI_NN )
  //=== VK_NN_vi_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                 VULKAN_HPP_NAMESPACE::SurfaceKHR *                  pSurface,
                                 Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
                                                       reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN &       createInfo,
                                 Optional<const AllocationCallbacks> allocator,
                                 Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateViSurfaceNN( m_instance,
                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN &       createInfo,
                                       Optional<const AllocationCallbacks> allocator,
                                       Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateViSurfaceNN( m_instance,
                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>(
                               static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_VI_NN*/

  //=== VK_KHR_maintenance1 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
                                                     VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkTrimCommandPoolKHR(
      m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  }

  //=== VK_KHR_device_group_creation ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR(
    uint32_t *                                            pPhysicalDeviceGroupCount,
    VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
    Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
      m_instance,
      pPhysicalDeviceGroupCount,
      reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
    uint32_t                                                                           physicalDeviceGroupCount;
    Result                                                                             result;
    do
    {
      result =
        static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
          m_instance,
          &physicalDeviceGroupCount,
          reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
        VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValue( result,
                              physicalDeviceGroupProperties,
                              VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
  }

  template <
    typename PhysicalDeviceGroupPropertiesAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroupsKHR(
      PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
      physicalDeviceGroupPropertiesAllocator );
    uint32_t physicalDeviceGroupCount;
    Result   result;
    do
    {
      result =
        static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
          m_instance,
          &physicalDeviceGroupCount,
          reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
        VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValue( result,
                              physicalDeviceGroupProperties,
                              VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
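
  // Both enhanced overloads above follow the enumeration pattern used for functions that may return
  // VK_INCOMPLETE: query the count, resize, fetch, and repeat while the result is Result::eIncomplete,
  // finally shrinking the vector if fewer elements were written than allocated.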

  //=== VK_KHR_external_memory_capabilities ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
    VULKAN_HPP_NAMESPACE::ExternalBufferProperties *               pExternalBufferProperties,
    Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
      reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
                                         PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
      reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
    return externalBufferProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_external_memory_win32 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                     HANDLE *                                                  pHandle,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryWin32HandleKHR(
      m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
                                          Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    HANDLE handle;
    Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR(
      m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR(
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
    HANDLE                                                 handle,
    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
    Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      handle,
      reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
    Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               HANDLE                                                 handle,
                                               Dispatch const &                                       d ) const
  {
    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
    Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      handle,
      reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
    return createResultValue(
      result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_KHR_external_memory_fd ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
                            int *                                            pFd,
                            Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
                                          Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  {
    int    fd;
    Result result = static_cast<Result>(
      d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                      int                                                    fd,
                                      VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR *          pMemoryFdProperties,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetMemoryFdPropertiesKHR( m_device,
                                    static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                    fd,
                                    reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
    Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                      int                                                    fd,
                                      Dispatch const &                                       d ) const
  {
    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
    Result                                      result = static_cast<Result>(
      d.vkGetMemoryFdPropertiesKHR( m_device,
                                    static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                    fd,
                                    reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
    return createResultValue(
      result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
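
  // Usage sketch (not part of the generated API), assuming a vk::Device device, a vk::DeviceMemory
  // memory allocated with an opaque-fd export handle type, and exceptions enabled so the enhanced
  // overload returns the file descriptor directly:
  //
  //   int fd = device.getMemoryFdKHR( vk::MemoryGetFdInfoKHR()
  //                                     .setMemory( memory )
  //                                     .setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );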

  //=== VK_KHR_external_semaphore_capabilities ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *               pExternalSemaphoreProperties,
    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
      reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
                                         PhysicalDevice::getExternalSemaphorePropertiesKHR(
      const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
      reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
    return externalSemaphoreProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_external_semaphore_win32 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR(
    const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo,
    Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
      m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
                                           Dispatch const &                          d ) const
  {
    Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR(
      m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR(
    const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo,
    HANDLE *                                                     pHandle,
    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
      m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
                                          Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo,
                                        Dispatch const &                       d ) const
  {
    HANDLE handle;
    Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR(
      m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_KHR_external_semaphore_fd ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
                                  Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkImportSemaphoreFdKHR(
      m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR(
      m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
                               int *                                               pFd,
                               Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
                                          Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  {
    int    fd;
    Result result = static_cast<Result>(
      d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_KHR_push_descriptor ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint          pipelineBindPoint,
                                         VULKAN_HPP_NAMESPACE::PipelineLayout             layout,
                                         uint32_t                                         set,
                                         uint32_t                                         descriptorWriteCount,
                                         const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                                         Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWriteCount,
                                 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR(
    VULKAN_HPP_NAMESPACE::PipelineBindPoint                            pipelineBindPoint,
    VULKAN_HPP_NAMESPACE::PipelineLayout                               layout,
    uint32_t                                                           set,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
    Dispatch const &                                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWrites.size(),
                                 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
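
  // Usage sketch (not part of the generated API): the ArrayProxy overload above accepts a braced list,
  // std::array, or std::vector of WriteDescriptorSet. Assuming a recording vk::CommandBuffer cmd, a
  // vk::PipelineLayout layout created with a push-descriptor set layout at set 0, and a prepared
  // vk::WriteDescriptorSet write:
  //
  //   cmd.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, layout, 0, { write } );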

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR(
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
    VULKAN_HPP_NAMESPACE::PipelineLayout           layout,
    uint32_t                                       set,
    const void *                                   pData,
    Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
                                             static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                             static_cast<VkPipelineLayout>( layout ),
                                             set,
                                             pData );
  }

  //=== VK_EXT_conditional_rendering ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
    const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
    Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginConditionalRenderingEXT(
      m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
                                                 Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginConditionalRenderingEXT(
      m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
  }
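
  // Usage sketch (not part of the generated API), assuming a recording vk::CommandBuffer cmd and a
  // vk::Buffer buffer created with conditional-rendering usage: the begin/end pair above brackets
  // commands whose execution is predicated on the 32-bit value read from the buffer.
  //
  //   cmd.beginConditionalRenderingEXT(
  //     vk::ConditionalRenderingBeginInfoEXT().setBuffer( buffer ).setOffset( 0 ) );
  //   // ... predicated commands ...
  //   cmd.endConditionalRenderingEXT();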

  //=== VK_KHR_descriptor_update_template ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR(
    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *                 pDescriptorUpdateTemplate,
    Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
    Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                               Optional<const AllocationCallbacks>        allocator,
                                               Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    Result                                         result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    return createResultValue(
      result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
    Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo,
                                                     Optional<const AllocationCallbacks>        allocator,
                                                     Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    Result                                         result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
    ObjectDestroy<Device, Dispatch>                deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
      result,
      descriptorUpdateTemplate,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique",
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
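
  // The ...Unique variant above wraps the created handle in a UniqueHandle; its ObjectDestroy deleter
  // destroys the descriptor update template through the owning device, reusing the same allocator and
  // dispatcher, when the handle goes out of scope.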

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplateKHR( m_device,
                                            static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                Optional<const AllocationCallbacks>            allocator,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDescriptorUpdateTemplateKHR(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
                                                VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                const void *                                   pData,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
                                            static_cast<VkDescriptorSet>( descriptorSet ),
                                            static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                            pData );
  }

  //=== VK_NV_clip_space_w_scaling ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setViewportWScalingNV( uint32_t                                         firstViewport,
                                          uint32_t                                         viewportCount,
                                          const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
                                          Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportWScalingNV( m_commandBuffer,
                                  firstViewport,
                                  viewportCount,
                                  reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV(
    uint32_t                                                           firstViewport,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
    Dispatch const &                                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportWScalingNV( m_commandBuffer,
                                  firstViewport,
                                  viewportWScalings.size(),
                                  reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_EXT_direct_mode_display ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
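
  // Note: unlike most functions in this header, releaseDisplayEXT has a single definition per build:
  // the raw Result-returning form when VULKAN_HPP_DISABLE_ENHANCED_MODE is defined, otherwise the
  // ResultValueType<void> form that routes the result through createResultValue.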

#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
  //=== VK_EXT_acquire_xlib_display ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT(
    Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          PhysicalDevice::acquireXlibDisplayEXT( Display &                        dpy,
                                           VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                           Dispatch const &                 d ) const
  {
    Result result =
      static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getRandROutputDisplayEXT( Display *                          dpy,
                                              RROutput                           rrOutput,
                                              VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
                                              Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
    PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    Result                           result = static_cast<Result>(
      d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
    return createResultValue(
      result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
    PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    Result                           result = static_cast<Result>(
      d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
    ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
      result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/

  //=== VK_EXT_display_surface_counter ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR                surface,
                                                VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
      m_physicalDevice,
      static_cast<VkSurfaceKHR>( surface ),
      reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
    PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
      m_physicalDevice,
      static_cast<VkSurfaceKHR>( surface ),
      reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) );
    return createResultValue(
      result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_EXT_display_control ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR                  display,
                                    const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
                                    Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkDisplayPowerControlEXT( m_device,
                                  static_cast<VkDisplayKHR>( display ),
                                  reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT(
    VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkDisplayPowerControlEXT( m_device,
                                  static_cast<VkDisplayKHR>( display ),
                                  reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT *  pDeviceEventInfo,
                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                              VULKAN_HPP_NAMESPACE::Fence *                     pFence,
                              Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkRegisterDeviceEventEXT( m_device,
                                  reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                  reinterpret_cast<VkFence *>( pFence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
    Device::registerEventEXT( const DeviceEventInfoEXT &          deviceEventInfo,
                              Optional<const AllocationCallbacks> allocator,
                              Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Fence fence;
    Result                      result = static_cast<Result>(
      d.vkRegisterDeviceEventEXT( m_device,
                                  reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkFence *>( &fence ) ) );
    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
    Device::registerEventEXTUnique( const DeviceEventInfoEXT &          deviceEventInfo,
                                    Optional<const AllocationCallbacks> allocator,
                                    Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Fence fence;
    Result                      result = static_cast<Result>(
      d.vkRegisterDeviceEventEXT( m_device,
                                  reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkFence *>( &fence ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
      result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR                  display,
                                     const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                     VULKAN_HPP_NAMESPACE::Fence *                     pFence,
                                     Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkRegisterDisplayEventEXT( m_device,
                                   static_cast<VkDisplayKHR>( display ),
                                   reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                   reinterpret_cast<VkFence *>( pFence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
    Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                                     const DisplayEventInfoEXT &         displayEventInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Fence fence;
    Result                      result = static_cast<Result>(
      d.vkRegisterDisplayEventEXT( m_device,
                                   static_cast<VkDisplayKHR>( display ),
                                   reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkFence *>( &fence ) ) );
    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
    Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR    display,
                                           const DisplayEventInfoEXT &         displayEventInfo,
                                           Optional<const AllocationCallbacks> allocator,
                                           Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::Fence fence;
    Result                      result = static_cast<Result>(
      d.vkRegisterDisplayEventEXT( m_device,
                                   static_cast<VkDisplayKHR>( display ),
                                   reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                   reinterpret_cast<VkFence *>( &fence ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>(
      result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
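
  // Usage sketch (not part of the generated API), assuming a vk::Device device, an acquired
  // vk::DisplayKHR display, and exceptions enabled: registering a display event yields a fence that is
  // signaled when the event occurs, e.g. the next first-pixel-out.
  //
  //   vk::Fence fence = device.registerDisplayEventEXT(
  //     display, vk::DisplayEventInfoEXT().setDisplayEvent( vk::DisplayEventTypeEXT::eFirstPixelOut ) );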

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR              swapchain,
                                    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
                                    uint64_t *                                      pCounterValue,
                                    Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device,
                                                            static_cast<VkSwapchainKHR>( swapchain ),
                                                            static_cast<VkSurfaceCounterFlagBitsEXT>( counter ),
                                                            pCounterValue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
                                          Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR              swapchain,
                                    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
                                    Dispatch const &                                d ) const
  {
    uint64_t counterValue;
    Result   result =
      static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device,
                                                       static_cast<VkSwapchainKHR>( swapchain ),
                                                       static_cast<VkSurfaceCounterFlagBitsEXT>( counter ),
                                                       &counterValue ) );
    return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_GOOGLE_display_timing ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR                 swapchain,
                                           VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
      m_device,
      static_cast<VkSwapchainKHR>( swapchain ),
      reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
    Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
    Result                                           result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
      m_device,
      static_cast<VkSwapchainKHR>( swapchain ),
      reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) );
    return createResultValue(
      result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                             uint32_t *                         pPresentationTimingCount,
                                             VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
      m_device,
      static_cast<VkSwapchainKHR>( swapchain ),
      pPresentationTimingCount,
      reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
    Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
    uint32_t                                                                         presentationTimingCount;
    Result                                                                           result;
    do
    {
      result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
        m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && presentationTimingCount )
      {
        presentationTimings.resize( presentationTimingCount );
        result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
          m_device,
          static_cast<VkSwapchainKHR>( swapchain ),
          &presentationTimingCount,
          reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
        VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) )
    {
      presentationTimings.resize( presentationTimingCount );
    }
    return createResultValue(
      result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
  }

  template <
    typename PastPresentationTimingGOOGLEAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
    Device::getPastPresentationTimingGOOGLE(
      VULKAN_HPP_NAMESPACE::SwapchainKHR      swapchain,
      PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
      Dispatch const &                        d ) const
  {
    std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
      pastPresentationTimingGOOGLEAllocator );
    uint32_t presentationTimingCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
        m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && presentationTimingCount )
      {
        presentationTimings.resize( presentationTimingCount );
        result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE(
          m_device,
          static_cast<VkSwapchainKHR>( swapchain ),
          &presentationTimingCount,
          reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
        VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) )
    {
      presentationTimings.resize( presentationTimingCount );
    }
    return createResultValue(
      result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
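
  // Usage sketch for the enhanced-mode wrappers above, assuming a valid vk::Device `device` and
  // vk::SwapchainKHR `swapchain` created elsewhere, with exceptions enabled:
  //
  //   vk::RefreshCycleDurationGOOGLE refresh = device.getRefreshCycleDurationGOOGLE( swapchain );
  //   std::vector<vk::PastPresentationTimingGOOGLE> timings = device.getPastPresentationTimingGOOGLE( swapchain );
  //   // refresh.refreshDuration and timings[i].actualPresentTime are expressed in nanoseconds.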

  //=== VK_EXT_discard_rectangles ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
                                                                uint32_t discardRectangleCount,
                                                                const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer,
                                   firstDiscardRectangle,
                                   discardRectangleCount,
                                   reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setDiscardRectangleEXT( uint32_t                                               firstDiscardRectangle,
                                           ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer,
                                   firstDiscardRectangle,
                                   discardRectangles.size(),
                                   reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
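
  // Usage sketch for the ArrayProxy overload above, assuming a vk::CommandBuffer `commandBuffer`
  // in the recording state and a pipeline that enables VK_EXT_discard_rectangles:
  //
  //   commandBuffer.setDiscardRectangleEXT( 0, { vk::Rect2D{ { 0, 0 }, { 256, 256 } } } );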

  //=== VK_EXT_hdr_metadata ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t                                     swapchainCount,
                                                    const VULKAN_HPP_NAMESPACE::SwapchainKHR *   pSwapchains,
                                                    const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkSetHdrMetadataEXT( m_device,
                           swapchainCount,
                           reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ),
                           reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const &   swapchains,
                               ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
#  else
    if ( swapchains.size() != metadata.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkSetHdrMetadataEXT( m_device,
                           swapchains.size(),
                           reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
                           reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
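
  // Usage sketch for the ArrayProxy overload above; the two proxies must have the same size, one
  // vk::HdrMetadataEXT entry per swapchain. Assumes `device`, `swapchain`, and the metadata values
  // come from elsewhere:
  //
  //   vk::HdrMetadataEXT metadata{};  // fill in display primaries, luminance limits, etc.
  //   device.setHdrMetadataEXT( swapchain, metadata );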

  //=== VK_KHR_create_renderpass2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                  VULKAN_HPP_NAMESPACE::RenderPass *                  pRenderPass,
                                  Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateRenderPass2KHR( m_device,
                                reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
    Device::createRenderPass2KHR( const RenderPassCreateInfo2 &       createInfo,
                                  Optional<const AllocationCallbacks> allocator,
                                  Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    Result                           result = static_cast<Result>(
      d.vkCreateRenderPass2KHR( m_device,
                                reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
    Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 &       createInfo,
                                        Optional<const AllocationCallbacks> allocator,
                                        Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    Result                           result = static_cast<Result>(
      d.vkCreateRenderPass2KHR( m_device,
                                reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>(
                                  static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>(
      result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                        const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *    pSubpassBeginInfo,
                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass2KHR( m_commandBuffer,
                                reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ),
                                reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin,
                                                             const SubpassBeginInfo &    subpassBeginInfo,
                                                             Dispatch const &            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginRenderPass2KHR( m_commandBuffer,
                                reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
                                reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
                                    const VULKAN_HPP_NAMESPACE::SubpassEndInfo *   pSubpassEndInfo,
                                    Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass2KHR( m_commandBuffer,
                            reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ),
                            reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo,
                                                         const SubpassEndInfo &   subpassEndInfo,
                                                         Dispatch const &         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdNextSubpass2KHR( m_commandBuffer,
                            reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
                            reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo,
                                                           Dispatch const &       d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
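
  // Usage sketch for the VK_KHR_create_renderpass2 wrappers above, assuming a valid vk::Device
  // `device`, a filled vk::RenderPassCreateInfo2 `createInfo`, a vk::RenderPassBeginInfo
  // `beginInfo`, and a recording vk::CommandBuffer `commandBuffer`, with exceptions enabled:
  //
  //   vk::UniqueRenderPass renderPass = device.createRenderPass2KHRUnique( createInfo );
  //   commandBuffer.beginRenderPass2KHR( beginInfo, vk::SubpassBeginInfo( vk::SubpassContents::eInline ) );
  //   // ... record subpass contents, calling nextSubpass2KHR between subpasses ...
  //   commandBuffer.endRenderPass2KHR( vk::SubpassEndInfo() );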

  //=== VK_KHR_shared_presentable_image ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR(
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
    return createResultValue(
      result,
      VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
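
  // Usage sketch for the wrapper above: both eSuccess and eSuboptimalKHR are treated as success
  // codes, so in enhanced mode the call returns the vk::Result instead of throwing when a shared
  // presentable swapchain has become suboptimal. Assumes `device` and `swapchain` exist elsewhere:
  //
  //   if ( device.getSwapchainStatusKHR( swapchain ) == vk::Result::eSuboptimalKHR )
  //   {
  //     // consider recreating the swapchain
  //   }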

  //=== VK_KHR_external_fence_capabilities ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties *               pExternalFenceProperties,
    Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalFencePropertiesKHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
      reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
                                         PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo,
                                                   Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
    d.vkGetPhysicalDeviceExternalFencePropertiesKHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
      reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
    return externalFenceProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_external_fence_win32 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR(
    const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo,
    Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkImportFenceWin32HandleKHR(
      m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo,
                                       Dispatch const &                      d ) const
  {
    Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR(
      m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                    HANDLE *                                                 pHandle,
                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetFenceWin32HandleKHR(
      m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
                                          Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    HANDLE handle;
    Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR(
      m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_KHR_external_fence_fd ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
                              Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
                           int *                                           pFd,
                           Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type
                                          Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  {
    int    fd;
    Result result = static_cast<Result>(
      d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
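
  // Usage sketch for the wrappers above, assuming a vk::Device `device` and a vk::Fence `fence`
  // created with a matching export handle type, with exceptions enabled:
  //
  //   int fd = device.getFenceFdKHR( vk::FenceGetFdInfoKHR( fence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd ) );
  //   // ownership of the file descriptor is transferred to the caller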

  //=== VK_KHR_performance_query ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
    uint32_t                                                 queueFamilyIndex,
    uint32_t *                                               pCounterCount,
    VULKAN_HPP_NAMESPACE::PerformanceCounterKHR *            pCounters,
    VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
    Dispatch const &                                         d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
      m_physicalDevice,
      queueFamilyIndex,
      pCounterCount,
      reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
      reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Allocator, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
      uint32_t                                                        queueFamilyIndex,
      ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
      Dispatch const &                                                d ) const
  {
    std::vector<PerformanceCounterDescriptionKHR, Allocator> counterDescriptions;
    uint32_t                                                 counterCount = counters.size();
    Result                                                   result;
    do
    {
      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
        m_physicalDevice,
        queueFamilyIndex,
        &counterCount,
        reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
        nullptr ) );
      if ( ( result == Result::eSuccess ) && counterCount )
      {
        counterDescriptions.resize( counterCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
      }
    } while ( result == Result::eIncomplete );
    if ( result == Result::eSuccess )
    {
      VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
      counterDescriptions.resize( counterCount );
    }
    return createResultValue( result,
                              counterDescriptions,
                              VULKAN_HPP_NAMESPACE_STRING
                              "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
  }

  template <
    typename Allocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR, Allocator>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
      uint32_t                                                        queueFamilyIndex,
      ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const & counters,
      Allocator const &                                               vectorAllocator,
      Dispatch const &                                                d ) const
  {
    std::vector<PerformanceCounterDescriptionKHR, Allocator> counterDescriptions( vectorAllocator );
    uint32_t                                                 counterCount = counters.size();
    Result                                                   result;
    do
    {
      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
        m_physicalDevice,
        queueFamilyIndex,
        &counterCount,
        reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
        nullptr ) );
      if ( ( result == Result::eSuccess ) && counterCount )
      {
        counterDescriptions.resize( counterCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
      }
    } while ( result == Result::eIncomplete );
    if ( result == Result::eSuccess )
    {
      VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
      counterDescriptions.resize( counterCount );
    }
    return createResultValue( result,
                              counterDescriptions,
                              VULKAN_HPP_NAMESPACE_STRING
                              "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
  }

  template <typename PerformanceCounterKHRAllocator,
            typename PerformanceCounterDescriptionKHRAllocator,
            typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t         queueFamilyIndex,
                                                                     Dispatch const & d ) const
  {
    std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
                                                                                               data;
    std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> &                       counters = data.first;
    std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions =
      data.second;
    uint32_t counterCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
        m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
      if ( ( result == Result::eSuccess ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
        VULKAN_HPP_ASSERT( counterCount <= counters.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
    {
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
  }

  template <typename PerformanceCounterKHRAllocator,
            typename PerformanceCounterDescriptionKHRAllocator,
            typename Dispatch,
            typename B1,
            typename B2,
            typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
                                      std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR(
      uint32_t                                    queueFamilyIndex,
      PerformanceCounterKHRAllocator &            performanceCounterKHRAllocator,
      PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
      Dispatch const &                            d ) const
  {
    std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
                                                                                               data( std::piecewise_construct,
            std::forward_as_tuple( performanceCounterKHRAllocator ),
            std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
    std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> &                       counters = data.first;
    std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions =
      data.second;
    uint32_t counterCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
        m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
      if ( ( result == Result::eSuccess ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
        VULKAN_HPP_ASSERT( counterCount <= counters.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
    {
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
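
  // Usage sketch for the pair-returning wrapper above, assuming a vk::PhysicalDevice
  // `physicalDevice` and a queue family index `queueFamilyIndex`, with exceptions enabled:
  //
  //   auto [counters, descriptions] = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
  //   // counters[i] and descriptions[i] refer to the same performance counter.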

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
    const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
    uint32_t *                                                      pNumPasses,
    Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
      m_physicalDevice,
      reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ),
      pNumPasses );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
    const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    uint32_t numPasses;
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
      m_physicalDevice,
      reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ),
      &numPasses );
    return numPasses;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR(
    const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkReleaseProfilingLockKHR( m_device );
  }
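
  // Usage sketch for the profiling lock wrappers above, assuming a vk::Device `device`; the lock
  // must be held while command buffers that use performance query pools are recorded:
  //
  //   device.acquireProfilingLockKHR( vk::AcquireProfilingLockInfoKHR( {}, UINT64_MAX ) );
  //   // ... record and submit command buffers containing performance queries ...
  //   device.releaseProfilingLockKHR();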

  //=== VK_KHR_get_surface_capabilities2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR *             pSurfaceCapabilities,
    Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
      reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
    PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                                Dispatch const &                      d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
      reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
    return createResultValue(
      result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
                                          PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                                Dispatch const &                      d ) const
  {
    StructureChain<X, Y, Z...>                      structureChain;
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities =
      structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
      reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
    return createResultValue(
      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
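
  // Usage sketch for the StructureChain flavour above, assuming a vk::PhysicalDevice
  // `physicalDevice` and a vk::SurfaceKHR `surface`; any structure chained after
  // vk::SurfaceCapabilities2KHR must be one that extends it:
  //
  //   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
  //                                                          vk::SharedPresentSurfaceCapabilitiesKHR>(
  //     vk::PhysicalDeviceSurfaceInfo2KHR( surface ) );
  //   vk::SurfaceCapabilities2KHR caps = chain.get<vk::SurfaceCapabilities2KHR>();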

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                           uint32_t *                                pSurfaceFormatCount,
                                           VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
                                           Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
      pSurfaceFormatCount,
      reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
    uint32_t                                                   surfaceFormatCount;
    Result                                                     result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice,
        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
        &surfaceFormatCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
          m_physicalDevice,
          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
          &surfaceFormatCount,
          reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
        VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValue(
      result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  }

  template <typename SurfaceFormat2KHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                           SurfaceFormat2KHRAllocator &          surfaceFormat2KHRAllocator,
                                           Dispatch const &                      d ) const
  {
    std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
    uint32_t                                                   surfaceFormatCount;
    Result                                                     result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice,
        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
        &surfaceFormatCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR(
          m_physicalDevice,
          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
          &surfaceFormatCount,
          reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
        VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValue(
      result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
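
  // Usage sketch for the vector-returning wrapper above, assuming `physicalDevice` and `surface`
  // exist elsewhere, with exceptions enabled:
  //
  //   std::vector<vk::SurfaceFormat2KHR> formats =
  //     physicalDevice.getSurfaceFormats2KHR( vk::PhysicalDeviceSurfaceInfo2KHR( surface ) );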

  //=== VK_KHR_get_display_properties2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getDisplayProperties2KHR( uint32_t *                                    pPropertyCount,
                                              VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
      m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
  {
    std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
    uint32_t                                                           propertyCount;
    Result                                                             result;
    do
    {
      result =
        static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
  }

  template <typename DisplayProperties2KHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator,
                                              Dispatch const &                 d ) const
  {
    std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
    uint32_t                                                           propertyCount;
    Result                                                             result;
    do
    {
      result =
        static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t *                                         pPropertyCount,
                                                   VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
      m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
  {
    std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
    uint32_t                                                                     propertyCount;
    Result                                                                       result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
  }

  template <typename DisplayPlaneProperties2KHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayPlaneProperties2KHR(
      DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
  {
    std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties(
      displayPlaneProperties2KHRAllocator );
    uint32_t propertyCount;
    Result   result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR                  display,
                                                  uint32_t *                                        pPropertyCount,
                                                  VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
                                        static_cast<VkDisplayKHR>( display ),
                                        pPropertyCount,
                                        reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
    uint32_t                                                                   propertyCount;
    Result                                                                     result;
    do
    {
      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>(
          d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
                                            static_cast<VkDisplayKHR>( display ),
                                            &propertyCount,
                                            reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
  }

  template <typename DisplayModeProperties2KHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
    PhysicalDevice::getDisplayModeProperties2KHR(
      VULKAN_HPP_NAMESPACE::DisplayKHR     display,
      DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
      Dispatch const &                     d ) const
  {
    std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties(
      displayModeProperties2KHRAllocator );
    uint32_t propertyCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>(
          d.vkGetDisplayModeProperties2KHR( m_physicalDevice,
                                            static_cast<VkDisplayKHR>( display ),
                                            &propertyCount,
                                            reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR(
    const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR *   pDisplayPlaneInfo,
    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
    Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
                                           reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
                                           reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
    PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo,
                                                     Dispatch const &             d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
    Result                                             result = static_cast<Result>(
      d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
                                           reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
                                           reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
    return createResultValue(
      result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
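
  // Usage sketch covering the VK_KHR_get_display_properties2 wrappers above, assuming a
  // vk::PhysicalDevice `physicalDevice` with at least one display attached and exceptions enabled:
  //
  //   std::vector<vk::DisplayProperties2KHR> displays = physicalDevice.getDisplayProperties2KHR();
  //   std::vector<vk::DisplayModeProperties2KHR> modes =
  //     physicalDevice.getDisplayModeProperties2KHR( displays[0].displayProperties.display );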

#if defined( VK_USE_PLATFORM_IOS_MVK )
  //=== VK_MVK_ios_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                   VULKAN_HPP_NAMESPACE::SurfaceKHR *                    pSurface,
                                   Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                               reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK &     createInfo,
                                   Optional<const AllocationCallbacks> allocator,
                                   Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK &     createInfo,
                                         Optional<const AllocationCallbacks> allocator,
                                         Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>(
                                 static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_IOS_MVK*/

#if defined( VK_USE_PLATFORM_MACOS_MVK )
  //=== VK_MVK_macos_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                     VULKAN_HPP_NAMESPACE::SurfaceKHR *                      pSurface,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK &   createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK &   createInfo,
                                           Optional<const AllocationCallbacks> allocator,
                                           Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_MACOS_MVK*/
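
  // Illustrative usage (not part of the generated API): on Apple platforms the MVK surface entry
  // points above are typically used through the smart-handle flavour, so the vk::SurfaceKHR is
  // destroyed automatically. Assuming a valid 'instance' and a CAMetalLayer-backed 'view' pointer:
  //
  //   vk::MacOSSurfaceCreateInfoMVK surfaceCreateInfo( {}, view );
  //   vk::UniqueSurfaceKHR surface = instance.createMacOSSurfaceMVKUnique( surfaceCreateInfo );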

  //=== VK_EXT_debug_utils ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT(
    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
      m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT(
      m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
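
  // Illustrative usage (not part of the generated API): attaching a human-readable name to a
  // handle so validation layers and debuggers can report it, assuming a valid 'device' and
  // 'buffer' (a vk::Buffer):
  //
  //   vk::DebugUtilsObjectNameInfoEXT nameInfo(
  //     vk::ObjectType::eBuffer, uint64_t( static_cast<VkBuffer>( buffer ) ), "vertex buffer" );
  //   device.setDebugUtilsObjectNameEXT( nameInfo );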

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT(
    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                                         Dispatch const &           d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                                          Dispatch const &           d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
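
  // Illustrative usage (not part of the generated API): the label calls above bracket regions of
  // work for debuggers and capture tools; each begin must be matched by an end on the same queue
  // or command buffer. Assuming a 'commandBuffer' that is currently being recorded:
  //
  //   commandBuffer.beginDebugUtilsLabelEXT( vk::DebugUtilsLabelEXT( "shadow pass" ) );
  //   // ... record the pass ...
  //   commandBuffer.endDebugUtilsLabelEXT();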

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
                                            VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT *                 pMessenger,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDebugUtilsMessengerEXT( m_instance,
                                        reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                        reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
    Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo,
                                            Optional<const AllocationCallbacks>      allocator,
                                            Dispatch const &                         d ) const
  {
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
    Result                                       result = static_cast<Result>(
      d.vkCreateDebugUtilsMessengerEXT( m_instance,
                                        reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
    return createResultValue(
      result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
    Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo,
                                                  Optional<const AllocationCallbacks>      allocator,
                                                  Dispatch const &                         d ) const
  {
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
    Result                                       result = static_cast<Result>(
      d.vkCreateDebugUtilsMessengerEXT( m_instance,
                                        reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>(
      result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
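
  // Illustrative usage (not part of the generated API): hooking a callback into the validation
  // layers, assuming VK_EXT_debug_utils is enabled on a valid 'instance', its entry points are
  // loaded into the dispatcher, and 'debugCallback' is a function with the
  // PFN_vkDebugUtilsMessengerCallbackEXT signature:
  //
  //   vk::DebugUtilsMessengerCreateInfoEXT messengerCreateInfo(
  //     {},
  //     vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
  //     vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
  //     &debugCallback );
  //   vk::UniqueDebugUtilsMessengerEXT messenger =
  //     instance.createDebugUtilsMessengerEXTUnique( messengerCreateInfo );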

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT      messenger,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugUtilsMessengerEXT( m_instance,
                                       static_cast<VkDebugUtilsMessengerEXT>( messenger ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                             Optional<const AllocationCallbacks>          allocator,
                                             Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT      messenger,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugUtilsMessengerEXT( m_instance,
                                       static_cast<VkDebugUtilsMessengerEXT>( messenger ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                            Optional<const AllocationCallbacks>          allocator,
                                            Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT(
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT       messageSeverity,
    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT              messageTypes,
    const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
    Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                          VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT        messageTypes,
                                          const DebugUtilsMessengerCallbackDataEXT &                 callbackData,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  //=== VK_ANDROID_external_memory_android_hardware_buffer ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID(
    const struct AHardwareBuffer *                                 buffer,
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
    Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
      m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
      m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
                                          Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    StructureChain<X, Y, Z...>                                     structureChain;
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
    Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID(
      m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
    return createResultValue(
      result, structureChain, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID(
    const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
    struct AHardwareBuffer **                                               pBuffer,
    Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
      m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
                                          Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info,
                                                   Dispatch const &                                  d ) const
  {
    struct AHardwareBuffer * buffer;
    Result                   result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID(
      m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
    return createResultValue(
      result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_ANDROID_KHR*/
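
  // Illustrative usage (not part of the generated API): importing an AHardwareBuffer starts by
  // querying its memory properties, assuming a valid 'device' and an 'AHardwareBuffer * hardwareBuffer':
  //
  //   vk::AndroidHardwareBufferPropertiesANDROID bufferProperties =
  //     device.getAndroidHardwareBufferPropertiesANDROID( *hardwareBuffer );
  //
  // bufferProperties.allocationSize and bufferProperties.memoryTypeBits then feed the
  // vk::MemoryAllocateInfo used to import the buffer as device memory.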

  //=== VK_EXT_sample_locations ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
                                  reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer,
                                  reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT(
    VULKAN_HPP_NAMESPACE::SampleCountFlagBits        samples,
    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
    Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
      m_physicalDevice,
      static_cast<VkSampleCountFlagBits>( samples ),
      reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
                                         PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
                                                 Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
      m_physicalDevice,
      static_cast<VkSampleCountFlagBits>( samples ),
      reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
    return multisampleProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
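
  // Illustrative usage (not part of the generated API): querying the supported sample-location
  // grid before programming custom positions, assuming a valid 'physicalDevice':
  //
  //   vk::MultisamplePropertiesEXT multisampleProperties =
  //     physicalDevice.getMultisamplePropertiesEXT( vk::SampleCountFlagBits::e4 );
  //
  // multisampleProperties.maxSampleLocationGridSize bounds the grid described by the
  // vk::SampleLocationsInfoEXT passed to CommandBuffer::setSampleLocationsEXT.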

  //=== VK_KHR_get_memory_requirements2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                            VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                            Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageMemoryRequirements2KHR( m_device,
                                        reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
                                        reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
                                         Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info,
                                            Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetImageMemoryRequirements2KHR( m_device,
                                        reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
                                        reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info,
                                            Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                  structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetImageMemoryRequirements2KHR( m_device,
                                        reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
                                        reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
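
  // Illustrative usage (not part of the generated API): the StructureChain overload above queries
  // extension outputs in the same call, e.g. dedicated-allocation requirements, assuming a valid
  // 'device' and 'image':
  //
  //   auto chain = device.getImageMemoryRequirements2KHR<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
  //     vk::ImageMemoryRequirementsInfo2( image ) );
  //   vk::MemoryRequirements2 const &         memoryRequirements    = chain.get<vk::MemoryRequirements2>();
  //   vk::MemoryDedicatedRequirements const & dedicatedRequirements = chain.get<vk::MemoryDedicatedRequirements>();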

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
                                             VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                             Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetBufferMemoryRequirements2KHR( m_device,
                                         reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ),
                                         reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
                                         Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info,
                                             Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetBufferMemoryRequirements2KHR( m_device,
                                         reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
                                         reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info,
                                             Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                  structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetBufferMemoryRequirements2KHR( m_device,
                                         reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
                                         reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR(
    const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
    uint32_t *                                                       pSparseMemoryRequirementCount,
    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *           pSparseMemoryRequirements,
    Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageSparseMemoryRequirements2KHR(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
      pSparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
                      Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info,
                                                  Dispatch const &                           d ) const
  {
    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t                                                                             sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements2KHR( m_device,
                                              reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                              &sparseMemoryRequirementCount,
                                              nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements2KHR(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
      &sparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    return sparseMemoryRequirements;
  }

  template <
    typename SparseImageMemoryRequirements2Allocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  VULKAN_HPP_NODISCARD
    VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
                      Device::getImageSparseMemoryRequirements2KHR(
      const ImageSparseMemoryRequirementsInfo2 & info,
      SparseImageMemoryRequirements2Allocator &  sparseImageMemoryRequirements2Allocator,
      Dispatch const &                           d ) const
  {
    std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements2KHR( m_device,
                                              reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                              &sparseMemoryRequirementCount,
                                              nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements2KHR(
      m_device,
      reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
      &sparseMemoryRequirementCount,
      reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    return sparseMemoryRequirements;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
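
  // Illustrative usage (not part of the generated API): sparse requirements are reported per image
  // aspect, so the enhanced overload above returns a vector, assuming a valid 'device' and a
  // sparsely-bound 'image':
  //
  //   std::vector<vk::SparseImageMemoryRequirements2> sparseRequirements =
  //     device.getImageSparseMemoryRequirements2KHR( vk::ImageSparseMemoryRequirementsInfo2( image ) );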

  //=== VK_KHR_acceleration_structure ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR(
    const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR *                 pAccelerationStructure,
    Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateAccelerationStructureKHR( m_device,
                                          reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                          reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
    Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo,
                                            Optional<const AllocationCallbacks>        allocator,
                                            Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
    Result                                         result = static_cast<Result>( d.vkCreateAccelerationStructureKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
    return createResultValue(
      result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
    Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo,
                                                  Optional<const AllocationCallbacks>        allocator,
                                                  Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
    Result                                         result = static_cast<Result>( d.vkCreateAccelerationStructureKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
    ObjectDestroy<Device, Dispatch>                deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>(
      result,
      accelerationStructure,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique",
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
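
  // Illustrative usage (not part of the generated API): an acceleration structure is placed inside
  // a caller-provided buffer, so creation only records that placement. Assuming a valid 'device'
  // and a suitably created 'asBuffer' of size 'asSize':
  //
  //   vk::AccelerationStructureCreateInfoKHR asCreateInfo(
  //     {}, asBuffer, 0, asSize, vk::AccelerationStructureTypeKHR::eBottomLevel );
  //   vk::UniqueAccelerationStructureKHR accelerationStructure =
  //     device.createAccelerationStructureKHRUnique( asCreateInfo );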

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR    accelerationStructure,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureKHR( m_device,
                                         static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
                                             Optional<const AllocationCallbacks>            allocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureKHR(
      m_device,
      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR    accelerationStructure,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureKHR( m_device,
                                         static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
                                          Optional<const AllocationCallbacks>            allocator,
                                          Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureKHR(
      m_device,
      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
    uint32_t                                                                     infoCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *      pInfos,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
    Dispatch const &                                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBuildAccelerationStructuresKHR(
      m_commandBuffer,
      infoCount,
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
      reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &      infos,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
#  else
    if ( infos.size() != pBuildRangeInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBuildAccelerationStructuresKHR(
      m_commandBuffer,
      infos.size(),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
      reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
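
  // Illustrative usage (not part of the generated API): the ArrayProxy overload above requires one
  // build-range pointer per geometry info and throws vk::LogicError (or asserts when exceptions
  // are disabled) if the counts differ. Assuming 'buildInfos' and 'rangePointers' are filled in
  // elsewhere and have equal sizes:
  //
  //   std::vector<vk::AccelerationStructureBuildGeometryInfoKHR>      buildInfos;
  //   std::vector<const vk::AccelerationStructureBuildRangeInfoKHR *> rangePointers;
  //   ...
  //   commandBuffer.buildAccelerationStructuresKHR( buildInfos, rangePointers );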

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
    uint32_t                                                                infoCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
    const VULKAN_HPP_NAMESPACE::DeviceAddress *                             pIndirectDeviceAddresses,
    const uint32_t *                                                        pIndirectStrides,
    const uint32_t * const *                                                ppMaxPrimitiveCounts,
    Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBuildAccelerationStructuresIndirectKHR(
      m_commandBuffer,
      infoCount,
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
      reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
      pIndirectStrides,
      ppMaxPrimitiveCounts );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const &                             indirectDeviceAddresses,
    ArrayProxy<const uint32_t> const &                                                        indirectStrides,
    ArrayProxy<const uint32_t * const> const &                                                pMaxPrimitiveCounts,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
    VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
    VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
#  else
    if ( infos.size() != indirectDeviceAddresses.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
    }
    if ( infos.size() != indirectStrides.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
    }
    if ( infos.size() != pMaxPrimitiveCounts.size() )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBuildAccelerationStructuresIndirectKHR(
      m_commandBuffer,
      infos.size(),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
      reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
      indirectStrides.data(),
      pMaxPrimitiveCounts.data() );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                   deferredOperation,
    uint32_t                                                                     infoCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR *      pInfos,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
    Dispatch const &                                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      infoCount,
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
      reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                                     deferredOperation,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const &      infos,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
#  else
    if ( infos.size() != pBuildRangeInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    Result result = static_cast<Result>( d.vkBuildAccelerationStructuresKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      infos.size(),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
      reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
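
  // Note (not part of the generated API): eOperationDeferredKHR and eOperationNotDeferredKHR are
  // treated as success codes here, so the returned Result has to be inspected when a valid
  // 'deferredOperation' handle is passed, e.g.:
  //
  //   vk::Result buildResult =
  //     device.buildAccelerationStructuresKHR( deferredOperation, buildInfos, rangePointers );
  //   if ( buildResult == vk::Result::eOperationDeferredKHR )
  //   {
  //     // the build continues asynchronously; completion is tracked through the DeferredOperationKHR
  //   }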

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                          const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCopyAccelerationStructureKHR( m_device,
                                        static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                        reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                          const CopyAccelerationStructureInfoKHR &   info,
                                          Dispatch const &                           d ) const
  {
    Result result = static_cast<Result>(
      d.vkCopyAccelerationStructureKHR( m_device,
                                        static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                        reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR                             deferredOperation,
    const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
    Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR       deferredOperation,
                                                  const CopyAccelerationStructureToMemoryInfoKHR & info,
                                                  Dispatch const &                                 d ) const
  {
    Result result = static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR                             deferredOperation,
    const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
    Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR       deferredOperation,
                                                  const CopyMemoryToAccelerationStructureInfoKHR & info,
                                                  Dispatch const &                                 d ) const
  {
    Result result = static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR(
    uint32_t                                               accelerationStructureCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType                        queryType,
    size_t                                                 dataSize,
    void *                                                 pData,
    size_t                                                 stride,
    Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
      m_device,
      accelerationStructureCount,
      reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
      static_cast<VkQueryType>( queryType ),
      dataSize,
      pData,
      stride ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<void>::type Device::writeAccelerationStructuresPropertiesKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                                          queryType,
      ArrayProxy<T> const &                                                    data,
      size_t                                                                   stride,
      Dispatch const &                                                         d ) const
  {
    Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
      m_device,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      data.size() * sizeof( T ),
      reinterpret_cast<void *>( data.data() ),
      stride ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
  }

  template <typename T, typename Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
                                          Device::writeAccelerationStructuresPropertiesKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                                          queryType,
      size_t                                                                   dataSize,
      size_t                                                                   stride,
      Dispatch const &                                                         d ) const
  {
    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
    Result                    result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
      m_device,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      data.size() * sizeof( T ),
      reinterpret_cast<void *>( data.data() ),
      stride ) );
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
  }

  template <typename T, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
                                          Device::writeAccelerationStructuresPropertyKHR(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType                                          queryType,
      size_t                                                                   stride,
      Dispatch const &                                                         d ) const
  {
    T      data;
    Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR(
      m_device,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      sizeof( T ),
      reinterpret_cast<void *>( &data ),
      stride ) );
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
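
  // Sketch of a host-side compacted-size query via the enhanced overloads above, assuming the default vk
  // namespace, an existing vk::Device `device`, a std::vector<vk::AccelerationStructureKHR> `structures` of
  // built structures, a single built structure `tlas`, and a device with accelerationStructureHostCommands
  // enabled:
  //   std::vector<vk::DeviceSize> sizes = device.writeAccelerationStructuresPropertiesKHR<vk::DeviceSize>(
  //     structures,
  //     vk::QueryType::eAccelerationStructureCompactedSizeKHR,
  //     structures.size() * sizeof( vk::DeviceSize ),
  //     sizeof( vk::DeviceSize ) );
  //   vk::DeviceSize tlasSize = device.writeAccelerationStructuresPropertyKHR<vk::DeviceSize>(
  //     tlas, vk::QueryType::eAccelerationStructureCompactedSizeKHR, sizeof( vk::DeviceSize ) );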

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
                                         reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer,
                                         reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
    const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
    Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureToMemoryKHR(
      m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureToMemoryKHR(
      m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
    const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
    Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyMemoryToAccelerationStructureKHR(
      m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyMemoryToAccelerationStructureKHR(
      m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
    const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
    Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR(
      m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR(
    const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetAccelerationStructureDeviceAddressKHR(
      m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
    uint32_t                                               accelerationStructureCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType                        queryType,
    VULKAN_HPP_NAMESPACE::QueryPool                        queryPool,
    uint32_t                                               firstQuery,
    Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesKHR(
      m_commandBuffer,
      accelerationStructureCount,
      reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
      static_cast<VkQueryType>( queryType ),
      static_cast<VkQueryPool>( queryPool ),
      firstQuery );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType                                          queryType,
    VULKAN_HPP_NAMESPACE::QueryPool                                          queryPool,
    uint32_t                                                                 firstQuery,
    Dispatch const &                                                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesKHR(
      m_commandBuffer,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      static_cast<VkQueryPool>( queryPool ),
      firstQuery );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR(
    const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR *     pCompatibility,
    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDeviceAccelerationStructureCompatibilityKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
      reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
                                         Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
    d.vkGetDeviceAccelerationStructureCompatibilityKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
      reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
    return compatibility;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
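
  // Sketch of a compatibility check via the enhanced overload above, assuming the default vk namespace, an
  // existing vk::Device `device`, and a vk::AccelerationStructureVersionInfoKHR `versionInfo` that points at
  // previously serialized version data:
  //   vk::AccelerationStructureCompatibilityKHR compat =
  //     device.getAccelerationStructureCompatibilityKHR( versionInfo );
  //   bool deserializable = ( compat == vk::AccelerationStructureCompatibilityKHR::eCompatible );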

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR(
    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR                 buildType,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
    const uint32_t *                                                        pMaxPrimitiveCounts,
    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR *          pSizeInfo,
    Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetAccelerationStructureBuildSizesKHR(
      m_device,
      static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
      pMaxPrimitiveCounts,
      reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
                                         Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
                                                   const AccelerationStructureBuildGeometryInfoKHR &       buildInfo,
                                                   ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
#  else
    if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
    {
      throw LogicError(
        VULKAN_HPP_NAMESPACE_STRING
        "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
    d.vkGetAccelerationStructureBuildSizesKHR(
      m_device,
      static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
      reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
      maxPrimitiveCounts.data(),
      reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
    return sizeInfo;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
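
  // The enhanced overload above insists that maxPrimitiveCounts.size() equals buildInfo.geometryCount,
  // throwing vk::LogicError (or asserting under VULKAN_HPP_NO_EXCEPTIONS) before touching the driver.
  // Sketch, assuming the default vk namespace, an existing vk::Device `device`, a filled-in
  // vk::AccelerationStructureBuildGeometryInfoKHR `buildInfo`, and a matching `maxPrimitiveCounts` array:
  //   vk::AccelerationStructureBuildSizesInfoKHR sizes = device.getAccelerationStructureBuildSizesKHR(
  //     vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, maxPrimitiveCounts );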

  //=== VK_KHR_sampler_ycbcr_conversion ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *              pAllocator,
                                             VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateSamplerYcbcrConversionKHR( m_device,
                                           reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                           reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
    Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo,
                                             Optional<const AllocationCallbacks>      allocator,
                                             Dispatch const &                         d ) const
  {
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    Result                                       result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    return createResultValue(
      result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
    Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo,
                                                   Optional<const AllocationCallbacks>      allocator,
                                                   Dispatch const &                         d ) const
  {
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    Result                                       result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
    ObjectDestroy<Device, Dispatch>              deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>(
      result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
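
  // Sketch of the Unique flavor above, assuming the default vk namespace, an existing vk::Device `device`,
  // a filled-in vk::SamplerYcbcrConversionCreateInfo `createInfo`, and the declaration's defaulted
  // allocator/dispatcher arguments; the returned UniqueHandle destroys the conversion when it leaves scope:
  //   auto ycbcrConversion = device.createSamplerYcbcrConversionKHRUnique( createInfo );
  //   vk::SamplerYcbcrConversionInfo conversionInfo( *ycbcrConversion );  // e.g. for a SamplerCreateInfo pNext chain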

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion      ycbcrConversion,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversionKHR( m_device,
                                          static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                              Optional<const AllocationCallbacks>          allocator,
                                              Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroySamplerYcbcrConversionKHR(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_KHR_bind_memory2 ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::bindBufferMemory2KHR( uint32_t                                           bindInfoCount,
                                  const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                                  Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindBufferMemory2KHR(
      m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
                                  Dispatch const &                                                     d ) const
  {
    Result result = static_cast<Result>( d.vkBindBufferMemory2KHR(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::bindImageMemory2KHR( uint32_t                                          bindInfoCount,
                                 const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                                 Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindImageMemory2KHR(
      m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos,
                                 Dispatch const &                                                    d ) const
  {
    Result result = static_cast<Result>( d.vkBindImageMemory2KHR(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
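
  // ArrayProxy lets the enhanced overloads above take a single struct or a braced list, so binding one
  // buffer is a one-liner. Sketch, assuming the default vk namespace plus existing `device`, `buffer` and
  // `memory` handles; with exceptions enabled a non-success result surfaces as a thrown vk::SystemError:
  //   vk::BindBufferMemoryInfo bindInfo( buffer, memory, 0 );
  //   device.bindBufferMemory2KHR( bindInfo );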

  //=== VK_EXT_image_drm_format_modifier ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT(
    VULKAN_HPP_NAMESPACE::Image                                 image,
    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties,
    Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
      m_device,
      static_cast<VkImage>( image ),
      reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
    Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
    Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
      m_device,
      static_cast<VkImage>( image ),
      reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
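
  // Sketch of the enhanced overload above, assuming the default vk namespace and existing `device` / `image`
  // handles, where the image was created with DRM format modifier tiling:
  //   vk::ImageDrmFormatModifierPropertiesEXT props = device.getImageDrmFormatModifierPropertiesEXT( image );
  //   uint64_t modifier = props.drmFormatModifier;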

  //=== VK_EXT_validation_cache ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                      VULKAN_HPP_NAMESPACE::ValidationCacheEXT *                 pValidationCache,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateValidationCacheEXT( m_device,
                                    reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                    reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
    Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo,
                                      Optional<const AllocationCallbacks>  allocator,
                                      Dispatch const &                     d ) const
  {
    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
    Result                                   result = static_cast<Result>(
      d.vkCreateValidationCacheEXT( m_device,
                                    reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
    return createResultValue(
      result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
    Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo,
                                            Optional<const AllocationCallbacks>  allocator,
                                            Dispatch const &                     d ) const
  {
    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
    Result                                   result = static_cast<Result>(
      d.vkCreateValidationCacheEXT( m_device,
                                    reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>(
      result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT          validationCache,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                       Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyValidationCacheEXT( m_device,
                                   static_cast<VkValidationCacheEXT>( validationCache ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                                            Optional<const AllocationCallbacks>      allocator,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyValidationCacheEXT( m_device,
                                   static_cast<VkValidationCacheEXT>( validationCache ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT          validationCache,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyValidationCacheEXT( m_device,
                                   static_cast<VkValidationCacheEXT>( validationCache ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                          Optional<const AllocationCallbacks>      allocator,
                                          Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyValidationCacheEXT( m_device,
                                   static_cast<VkValidationCacheEXT>( validationCache ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT         dstCache,
                                      uint32_t                                         srcCacheCount,
                                      const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
                                      Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkMergeValidationCachesEXT( m_device,
                                    static_cast<VkValidationCacheEXT>( dstCache ),
                                    srcCacheCount,
                                    reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT                           dstCache,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
                                      Dispatch const &                                                   d ) const
  {
    Result result = static_cast<Result>(
      d.vkMergeValidationCachesEXT( m_device,
                                    static_cast<VkValidationCacheEXT>( dstCache ),
                                    srcCaches.size(),
                                    reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                       size_t *                                 pDataSize,
                                       void *                                   pData,
                                       Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetValidationCacheDataEXT(
      m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
                       Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                       Dispatch const &                         d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> data;
    size_t                                 dataSize;
    Result                                 result;
    do
    {
      result = static_cast<Result>( d.vkGetValidationCacheDataEXT(
        m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
      if ( ( result == Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result =
          static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device,
                                                              static_cast<VkValidationCacheEXT>( validationCache ),
                                                              &dataSize,
                                                              reinterpret_cast<void *>( data.data() ) ) );
        VULKAN_HPP_ASSERT( dataSize <= data.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
    {
      data.resize( dataSize );
    }
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
  }

  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
                       Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
                                       Uint8_tAllocator &                       uint8_tAllocator,
                                       Dispatch const &                         d ) const
  {
    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
    size_t                                 dataSize;
    Result                                 result;
    do
    {
      result = static_cast<Result>( d.vkGetValidationCacheDataEXT(
        m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
      if ( ( result == Result::eSuccess ) && dataSize )
      {
        data.resize( dataSize );
        result =
          static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device,
                                                              static_cast<VkValidationCacheEXT>( validationCache ),
                                                              &dataSize,
                                                              reinterpret_cast<void *>( data.data() ) ) );
        VULKAN_HPP_ASSERT( dataSize <= data.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
    {
      data.resize( dataSize );
    }
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
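
  // Both enhanced overloads above follow the usual two-call idiom: query the size with a null data pointer,
  // resize, fetch, and loop while the driver reports eIncomplete. Sketch, assuming the default vk namespace
  // and existing `device` / `validationCache` handles:
  //   std::vector<uint8_t> blob = device.getValidationCacheDataEXT( validationCache );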

  //=== VK_NV_shading_rate_image ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView   imageView,
                                                                VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindShadingRateImageNV(
      m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
    uint32_t                                           firstViewport,
    uint32_t                                           viewportCount,
    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
    Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer,
                                            firstViewport,
                                            viewportCount,
                                            reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
    uint32_t                                                             firstViewport,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
    Dispatch const &                                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportShadingRatePaletteNV(
      m_commandBuffer,
      firstViewport,
      shadingRatePalettes.size(),
      reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
                                           uint32_t                                      customSampleOrderCount,
                                           const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
                                   static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
                                   customSampleOrderCount,
                                   reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV(
    VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV                             sampleOrderType,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
    Dispatch const &                                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
                                   static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
                                   customSampleOrders.size(),
                                   reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_NV_ray_tracing ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *               pAllocator,
                                           VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
                                           Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateAccelerationStructureNV( m_device,
                                         reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                         reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
    Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo,
                                           Optional<const AllocationCallbacks>       allocator,
                                           Dispatch const &                          d ) const
  {
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
    Result                                        result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
    return createResultValue(
      result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
    Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo,
                                                 Optional<const AllocationCallbacks>       allocator,
                                                 Dispatch const &                          d ) const
  {
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
    Result                                        result = static_cast<Result>( d.vkCreateAccelerationStructureNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
    ObjectDestroy<Device, Dispatch>               deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>(
      result,
      accelerationStructure,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique",
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV     accelerationStructure,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureNV( m_device,
                                        static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                            Optional<const AllocationCallbacks>           allocator,
                                            Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureNV(
      m_device,
      static_cast<VkAccelerationStructureNV>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV     accelerationStructure,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureNV( m_device,
                                        static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                          Optional<const AllocationCallbacks>           allocator,
                                          Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyAccelerationStructureNV(
      m_device,
      static_cast<VkAccelerationStructureNV>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV(
    const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR *                              pMemoryRequirements,
    Dispatch const &                                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetAccelerationStructureMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
      reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
                                         Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
    d.vkGetAccelerationStructureMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
      reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                     structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
    d.vkGetAccelerationStructureMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
      reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV(
    uint32_t                                                            bindInfoCount,
    const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos,
    Dispatch const &                                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
      m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::bindAccelerationStructureMemoryNV(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
      Dispatch const &                                                                      d ) const
  {
    Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV(
      m_device,
      bindInfos.size(),
      reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
                                                 VULKAN_HPP_NAMESPACE::Buffer                              instanceData,
                                                 VULKAN_HPP_NAMESPACE::DeviceSize              instanceOffset,
                                                 VULKAN_HPP_NAMESPACE::Bool32                  update,
                                                 VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
                                                 VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
                                                 VULKAN_HPP_NAMESPACE::Buffer                  scratch,
                                                 VULKAN_HPP_NAMESPACE::DeviceSize              scratchOffset,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info,
                                                                      VULKAN_HPP_NAMESPACE::Buffer        instanceData,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
                                                                      VULKAN_HPP_NAMESPACE::Bool32     update,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
                                                                      VULKAN_HPP_NAMESPACE::Buffer     scratch,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV          dst,
                                                VULKAN_HPP_NAMESPACE::AccelerationStructureNV          src,
                                                VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
                                        static_cast<VkAccelerationStructureNV>( dst ),
                                        static_cast<VkAccelerationStructureNV>( src ),
                                        static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer     raygenShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::Buffer     missShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer     hitShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer     callableShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
                                                     uint32_t                         width,
                                                     uint32_t                         height,
                                                     uint32_t                         depth,
                                                     Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysNV( m_commandBuffer,
                        static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
                        static_cast<VkBuffer>( missShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( missShaderBindingOffset ),
                        static_cast<VkDeviceSize>( missShaderBindingStride ),
                        static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( hitShaderBindingOffset ),
                        static_cast<VkDeviceSize>( hitShaderBindingStride ),
                        static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( callableShaderBindingOffset ),
                        static_cast<VkDeviceSize>( callableShaderBindingStride ),
                        width,
                        height,
                        depth );
  }
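
  // traceRaysNV takes only handles and plain scalars, so no separate enhanced-mode overload exists for it;
  // the single flavor above serves both modes.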

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
                                         uint32_t                                                     createInfoCount,
                                         const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks *            pAllocator,
                                         VULKAN_HPP_NAMESPACE::Pipeline *                             pPipelines,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfoCount,
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                       reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
                                         Device::createRayTracingPipelinesNV(
      VULKAN_HPP_NAMESPACE::PipelineCache                                            pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks>                                            allocator,
      Dispatch const &                                                               d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    Result                                   result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfos.size(),
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue(
      result,
      pipelines,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename PipelineAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
                                         Device::createRayTracingPipelinesNV(
      VULKAN_HPP_NAMESPACE::PipelineCache                                            pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks>                                            allocator,
      PipelineAllocator &                                                            pipelineAllocator,
      Dispatch const &                                                               d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    Result                                   result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfos.size(),
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue(
      result,
      pipelines,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
                                         Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache                          pipelineCache,
                                        const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                        Optional<const AllocationCallbacks>                          allocator,
                                        Dispatch const &                                             d ) const
  {
    Pipeline pipeline;
    Result   result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       1,
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    return createResultValue(
      result,
      pipeline,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                                         Device::createRayTracingPipelinesNVUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                                            pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks>                                            allocator,
      Dispatch const &                                                               d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    std::vector<Pipeline>                                            pipelines( createInfos.size() );
    Result                                                           result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfos.size(),
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue(
      result,
      std::move( uniquePipelines ),
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <
    typename Dispatch,
    typename PipelineAllocator,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                                         Device::createRayTracingPipelinesNVUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                                            pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      Optional<const AllocationCallbacks>                                            allocator,
      PipelineAllocator &                                                            pipelineAllocator,
      Dispatch const &                                                               d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    std::vector<Pipeline>                                            pipelines( createInfos.size() );
    Result                                                           result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       createInfos.size(),
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue(
      result,
      std::move( uniquePipelines ),
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
                                         Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                              const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                              Optional<const AllocationCallbacks>                          allocator,
                                              Dispatch const &                                             d ) const
  {
    Pipeline pipeline;
    Result   result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesNV( m_device,
                                       static_cast<VkPipelineCache>( pipelineCache ),
                                       1,
                                       reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
                                       reinterpret_cast<const VkAllocationCallbacks *>(
                                         static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                       reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<Pipeline, Dispatch>(
      result,
      pipeline,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
      { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
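
  // Usage sketch (illustrative comment only, not part of the generated interface): creating a single
  // VK_NV_ray_tracing pipeline with the enhanced-mode wrapper above. Because ePipelineCompileRequiredEXT
  // is an allowed success code, the wrapper returns a ResultValue instead of just the handle. The names
  // `device`, `pipelineCache` and `createInfo` are placeholders for objects set up elsewhere, `vk` is the
  // default VULKAN_HPP_NAMESPACE, and exceptions are assumed to be enabled.
  //
  //   vk::ResultValue<vk::Pipeline> rv = device.createRayTracingPipelineNV( pipelineCache, createInfo );
  //   // rv.result is eSuccess or ePipelineCompileRequiredEXT; error codes throw (or, with
  //   // VULKAN_HPP_NO_EXCEPTIONS, have to be checked on rv.result)
  //   vk::Pipeline rayTracingPipeline = rv.value;
  //
  //   // the ...Unique flavour wraps the handle in a UniqueHandle that destroys it automatically:
  //   //   auto uniqueRv = device.createRayTracingPipelineNVUnique( pipelineCache, createInfo );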

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                               uint32_t                       firstGroup,
                                               uint32_t                       groupCount,
                                               size_t                         dataSize,
                                               void *                         pData,
                                               Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV(
      VULKAN_HPP_NAMESPACE::Pipeline pipeline,
      uint32_t                       firstGroup,
      uint32_t                       groupCount,
      ArrayProxy<T> const &          data,
      Dispatch const &               d ) const
  {
    Result result =
      static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
                                                                  static_cast<VkPipeline>( pipeline ),
                                                                  firstGroup,
                                                                  groupCount,
                                                                  data.size() * sizeof( T ),
                                                                  reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
  }

  template <typename T, typename Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
                                          Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                               uint32_t                       firstGroup,
                                               uint32_t                       groupCount,
                                               size_t                         dataSize,
                                               Dispatch const &               d ) const
  {
    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
    Result                    result =
      static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
                                                                  static_cast<VkPipeline>( pipeline ),
                                                                  firstGroup,
                                                                  groupCount,
                                                                  data.size() * sizeof( T ),
                                                                  reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
  }

  template <typename T, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
                                          Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                              uint32_t                       firstGroup,
                                              uint32_t                       groupCount,
                                              Dispatch const &               d ) const
  {
    T      data;
    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device,
                                                                                static_cast<VkPipeline>( pipeline ),
                                                                                firstGroup,
                                                                                groupCount,
                                                                                sizeof( T ),
                                                                                reinterpret_cast<void *>( &data ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
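
  // Usage sketch (illustrative comment only): reading all shader group handles of an NV ray tracing
  // pipeline into a byte vector, e.g. to fill a shader binding table. `rayTracingPipeline`, `groupCount`
  // and `handleSize` are placeholders; `handleSize` would typically come from
  // vk::PhysicalDeviceRayTracingPropertiesNV::shaderGroupHandleSize.
  //
  //   std::vector<uint8_t> handles = device.getRayTracingShaderGroupHandlesNV<uint8_t>(
  //     rayTracingPipeline, 0, groupCount, size_t( groupCount ) * handleSize );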

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                              size_t                                        dataSize,
                                              void *                                        pData,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV(
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
      ArrayProxy<T> const &                         data,
      Dispatch const &                              d ) const
  {
    Result result = static_cast<Result>(
      d.vkGetAccelerationStructureHandleNV( m_device,
                                            static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                            data.size() * sizeof( T ),
                                            reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
  }

  template <typename T, typename Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
                                          Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                              size_t                                        dataSize,
                                              Dispatch const &                              d ) const
  {
    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
    Result                    result = static_cast<Result>(
      d.vkGetAccelerationStructureHandleNV( m_device,
                                            static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                            data.size() * sizeof( T ),
                                            reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
  }

  template <typename T, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
                                          Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                              Dispatch const &                              d ) const
  {
    T      data;
    Result result = static_cast<Result>(
      d.vkGetAccelerationStructureHandleNV( m_device,
                                            static_cast<VkAccelerationStructureNV>( accelerationStructure ),
                                            sizeof( T ),
                                            reinterpret_cast<void *>( &data ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
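
  // Usage sketch (illustrative comment only): the NV acceleration structure handle is a plain 64-bit
  // value, so the single-value flavour above can be used directly; it is typically copied into the
  // per-instance data of a top-level acceleration structure. `accelerationStructure` is a placeholder.
  //
  //   uint64_t asHandle = device.getAccelerationStructureHandleNV<uint64_t>( accelerationStructure );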

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
    uint32_t                                              accelerationStructureCount,
    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType                       queryType,
    VULKAN_HPP_NAMESPACE::QueryPool                       queryPool,
    uint32_t                                              firstQuery,
    Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesNV(
      m_commandBuffer,
      accelerationStructureCount,
      reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
      static_cast<VkQueryType>( queryType ),
      static_cast<VkQueryPool>( queryPool ),
      firstQuery );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType                                         queryType,
    VULKAN_HPP_NAMESPACE::QueryPool                                         queryPool,
    uint32_t                                                                firstQuery,
    Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteAccelerationStructuresPropertiesNV(
      m_commandBuffer,
      accelerationStructures.size(),
      reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
      static_cast<VkQueryType>( queryType ),
      static_cast<VkQueryPool>( queryPool ),
      firstQuery );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
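
  // Usage sketch (illustrative comment only): querying the compacted size of one acceleration structure;
  // `queryPool` is assumed to have been created with vk::QueryType::eAccelerationStructureCompactedSizeNV.
  // The ArrayProxy overload above also accepts a single handle, as shown here.
  //
  //   commandBuffer.writeAccelerationStructuresPropertiesNV(
  //     accelerationStructure, vk::QueryType::eAccelerationStructureCompactedSizeNV, queryPool, 0 );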

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_KHR_maintenance3 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                              VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *          pSupport,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetDescriptorSetLayoutSupportKHR( m_device,
                                          reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
                                          reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
                                         Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
                                              Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
    d.vkGetDescriptorSetLayoutSupportKHR( m_device,
                                          reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                          reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return support;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo,
                                              Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                         structureChain;
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support =
      structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
    d.vkGetDescriptorSetLayoutSupportKHR( m_device,
                                          reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
                                          reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
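
  // Usage sketch (illustrative comment only): querying descriptor set layout support, optionally chaining
  // vk::DescriptorSetVariableDescriptorCountLayoutSupport to also learn the maximum variable descriptor
  // count. `layoutCreateInfo` is a placeholder for a filled vk::DescriptorSetLayoutCreateInfo.
  //
  //   vk::Bool32 supported = device.getDescriptorSetLayoutSupportKHR( layoutCreateInfo ).supported;
  //
  //   auto chain = device.getDescriptorSetLayoutSupportKHR<vk::DescriptorSetLayoutSupport,
  //                                                        vk::DescriptorSetVariableDescriptorCountLayoutSupport>(
  //     layoutCreateInfo );
  //   uint32_t maxVariableCount =
  //     chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;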

  //=== VK_KHR_draw_indirect_count ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                              VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                              uint32_t                         maxDrawCount,
                                                              uint32_t                         stride,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
                                 static_cast<VkBuffer>( buffer ),
                                 static_cast<VkDeviceSize>( offset ),
                                 static_cast<VkBuffer>( countBuffer ),
                                 static_cast<VkDeviceSize>( countBufferOffset ),
                                 maxDrawCount,
                                 stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                     VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                     uint32_t                         maxDrawCount,
                                                                     uint32_t                         stride,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
                                        static_cast<VkBuffer>( buffer ),
                                        static_cast<VkDeviceSize>( offset ),
                                        static_cast<VkBuffer>( countBuffer ),
                                        static_cast<VkDeviceSize>( countBufferOffset ),
                                        maxDrawCount,
                                        stride );
  }
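
  // Usage sketch (illustrative comment only): a GPU-driven indexed draw where `argumentBuffer` holds an
  // array of VkDrawIndexedIndirectCommand structs and the first uint32_t in `countBuffer` holds the
  // actual draw count, clamped by `maxDrawCount`.
  //
  //   commandBuffer.drawIndexedIndirectCountKHR(
  //     argumentBuffer, 0, countBuffer, 0, maxDrawCount, sizeof( VkDrawIndexedIndirectCommand ) );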

  //=== VK_EXT_external_memory_host ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT(
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
    const void *                                           pHostPointer,
    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
    Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      pHostPointer,
      reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
    Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               const void *                                           pHostPointer,
                                               Dispatch const &                                       d ) const
  {
    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
    Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      pHostPointer,
      reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
    return createResultValue(
      result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
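
  // Usage sketch (illustrative comment only): checking which memory types can import a host allocation
  // before using it with VkImportMemoryHostPointerInfoEXT. `hostPointer` must meet the alignment
  // requirement reported in vk::PhysicalDeviceExternalMemoryHostPropertiesEXT.
  //
  //   vk::MemoryHostPointerPropertiesEXT props = device.getMemoryHostPointerPropertiesEXT(
  //     vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPointer );
  //   // props.memoryTypeBits restricts the memory types usable for the import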

  //=== VK_AMD_buffer_marker ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                                                              VULKAN_HPP_NAMESPACE::Buffer                dstBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize            dstOffset,
                                                              uint32_t                                    marker,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
                                 static_cast<VkPipelineStageFlagBits>( pipelineStage ),
                                 static_cast<VkBuffer>( dstBuffer ),
                                 static_cast<VkDeviceSize>( dstOffset ),
                                 marker );
  }

  //=== VK_EXT_calibrated_timestamps ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t *                            pTimeDomainCount,
                                                    VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
                                                    Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
      m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename TimeDomainEXTAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
    PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
  {
    std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
    uint32_t                                           timeDomainCount;
    Result                                             result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && timeDomainCount )
      {
        timeDomains.resize( timeDomainCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
          m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
        VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
    {
      timeDomains.resize( timeDomainCount );
    }
    return createResultValue(
      result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
  }

  template <typename TimeDomainEXTAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type
    PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator,
                                                    Dispatch const &         d ) const
  {
    std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
    uint32_t                                           timeDomainCount;
    Result                                             result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && timeDomainCount )
      {
        timeDomains.resize( timeDomainCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
          m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
        VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
    {
      timeDomains.resize( timeDomainCount );
    }
    return createResultValue(
      result, timeDomains, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
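
  // Usage sketch (illustrative comment only): the enhanced-mode wrapper above hides the usual
  // count/allocate/fill dance and simply returns the supported time domains.
  //
  //   std::vector<vk::TimeDomainEXT> timeDomains = physicalDevice.getCalibrateableTimeDomainsEXT();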

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getCalibratedTimestampsEXT( uint32_t                                                 timestampCount,
                                        const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
                                        uint64_t *                                               pTimestamps,
                                        uint64_t *                                               pMaxDeviation,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetCalibratedTimestampsEXT( m_device,
                                      timestampCount,
                                      reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ),
                                      pTimestamps,
                                      pMaxDeviation ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
      ArrayProxy<uint64_t> const &                                               timestamps,
      Dispatch const &                                                           d ) const
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
#  else
    if ( timestampInfos.size() != timestamps.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::Device::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    uint64_t maxDeviation;
    Result   result = static_cast<Result>(
      d.vkGetCalibratedTimestampsEXT( m_device,
                                      timestampInfos.size(),
                                      reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
                                      timestamps.data(),
                                      &maxDeviation ) );
    return createResultValue(
      result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
  }

  template <typename Uint64_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
    Device::getCalibratedTimestampsEXT(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
      Dispatch const &                                                           d ) const
  {
    std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
      std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
    std::vector<uint64_t, Uint64_tAllocator> & timestamps   = data.first;
    uint64_t &                                 maxDeviation = data.second;
    Result                                     result       = static_cast<Result>(
      d.vkGetCalibratedTimestampsEXT( m_device,
                                      timestampInfos.size(),
                                      reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
                                      timestamps.data(),
                                      &maxDeviation ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
  }

  template <typename Uint64_tAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
    Device::getCalibratedTimestampsEXT(
      ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
      Uint64_tAllocator &                                                        uint64_tAllocator,
      Dispatch const &                                                           d ) const
  {
    std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
      std::piecewise_construct,
      std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ),
      std::forward_as_tuple( 0 ) );
    std::vector<uint64_t, Uint64_tAllocator> & timestamps   = data.first;
    uint64_t &                                 maxDeviation = data.second;
    Result                                     result       = static_cast<Result>(
      d.vkGetCalibratedTimestampsEXT( m_device,
                                      timestampInfos.size(),
                                      reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
                                      timestamps.data(),
                                      &maxDeviation ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
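
  // Usage sketch (illustrative comment only): sampling the device clock and a host clock in one call.
  // Only time domains previously reported by getCalibrateableTimeDomainsEXT should be requested;
  // eClockMonotonic is used here purely as an example and is platform dependent.
  //
  //   std::array<vk::CalibratedTimestampInfoEXT, 2> infos = {
  //     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eDevice ),
  //     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eClockMonotonic ) };
  //   auto data = device.getCalibratedTimestampsEXT( infos );
  //   // data.first holds one timestamp per entry of infos, data.second the maximum deviation in nanoseconds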

  //=== VK_NV_mesh_shader ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t         taskCount,
                                                         uint32_t         firstTask,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                 uint32_t                         drawCount,
                                                                 uint32_t                         stride,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMeshTasksIndirectNV(
      m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                 VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                 VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                 uint32_t                         maxDrawCount,
                                                 uint32_t                         stride,
                                                 Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
                                         static_cast<VkBuffer>( buffer ),
                                         static_cast<VkDeviceSize>( offset ),
                                         static_cast<VkBuffer>( countBuffer ),
                                         static_cast<VkDeviceSize>( countBufferOffset ),
                                         maxDrawCount,
                                         stride );
  }

  //=== VK_NV_scissor_exclusive ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
                                                               uint32_t exclusiveScissorCount,
                                                               const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
                                  firstExclusiveScissor,
                                  exclusiveScissorCount,
                                  reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setExclusiveScissorNV( uint32_t                                               firstExclusiveScissor,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetExclusiveScissorNV( m_commandBuffer,
                                  firstExclusiveScissor,
                                  exclusiveScissors.size(),
                                  reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
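
  // Usage sketch (illustrative comment only): the ArrayProxy overload above accepts an initializer list
  // or a single vk::Rect2D directly; `width` and `height` are placeholders.
  //
  //   commandBuffer.setExclusiveScissorNV(
  //     0, vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( width, height ) ) );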

  //=== VK_NV_device_diagnostic_checkpoints ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void *     pCheckpointMarker,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t *                               pCheckpointDataCount,
                                                     VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetQueueCheckpointDataNV(
      m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename CheckpointDataNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
                                         Queue::getCheckpointDataNV( Dispatch const & d ) const
  {
    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
    uint32_t                                                 checkpointDataCount;
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointDataNV(
      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    return checkpointData;
  }

  template <typename CheckpointDataNVAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator>
                                         Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
  {
    std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
    uint32_t                                                 checkpointDataCount;
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointDataNV(
      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    return checkpointData;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
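
  // Usage sketch (illustrative comment only): after a device-lost error, the queue can be asked for the
  // checkpoints that were last reached, provided markers were recorded with CommandBuffer::setCheckpointNV.
  //
  //   std::vector<vk::CheckpointDataNV> checkpoints = queue.getCheckpointDataNV();
  //   for ( vk::CheckpointDataNV const & cp : checkpoints )
  //   {
  //     // cp.stage is the pipeline stage, cp.pCheckpointMarker the marker value last set there
  //   }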

  //=== VK_KHR_timeline_semaphore ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR(
    VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
                                          Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
  {
    uint64_t value;
    Result   result =
      static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
                               uint64_t                                        timeout,
                               Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo,
                                                                           uint64_t                  timeout,
                                                                           Dispatch const &          d ) const
  {
    Result result = static_cast<Result>(
      d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR(
    const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
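
  // Usage sketch (illustrative comment only): host-side signalling and waiting on a timeline semaphore
  // created with vk::SemaphoreType::eTimeline. `timelineSemaphore` is a placeholder; note that
  // waitSemaphoresKHR returns a vk::Result because eTimeout is a success code.
  //
  //   uint64_t current = device.getSemaphoreCounterValueKHR( timelineSemaphore );
  //   device.signalSemaphoreKHR( vk::SemaphoreSignalInfo( timelineSemaphore, current + 1 ) );
  //
  //   uint64_t waitValue = current + 1;
  //   vk::SemaphoreWaitInfo waitInfo( {}, 1, &timelineSemaphore, &waitValue );
  //   vk::Result r = device.waitSemaphoresKHR( waitInfo, UINT64_MAX );  // eSuccess or eTimeout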

  //=== VK_INTEL_performance_query ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL(
    const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo,
    Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkInitializePerformanceApiINTEL(
      m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo,
                                           Dispatch const &                          d ) const
  {
    Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL(
      m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkUninitializePerformanceApiINTEL( m_device );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo,
    Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo,
                                                    Dispatch const &                         d ) const
  {
    Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo,
    Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo,
                                                Dispatch const &                     d ) const
  {
    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL(
      m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL(
    const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL *                  pConfiguration,
    Dispatch const &                                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
      m_device,
      reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
      reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
    Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
                                                  Dispatch const &                                 d ) const
  {
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
    Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
      m_device,
      reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
      reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
    return createResultValue(
      result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
    Device::acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
                                                        Dispatch const &                                 d ) const
  {
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
    Result                          result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL(
      m_device,
      reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
      reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
    ObjectRelease<Device, Dispatch> deleter( *this, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>(
      result,
      configuration,
      VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique",
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
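
  // Usage sketch (illustrative comment, not generated from the registry; `device` and `queue` are
  // placeholder handles and the default `vk` namespace is assumed): a configuration acquired through
  // the enhanced overload above is handed to a queue and released when profiling is done, while the
  // ...Unique variant wraps it in a UniqueHandle that releases it automatically:
  //
  //   vk::PerformanceConfigurationAcquireInfoINTEL acquireInfo(
  //     vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated );
  //   vk::PerformanceConfigurationINTEL config = device.acquirePerformanceConfigurationINTEL( acquireInfo );
  //   queue.setPerformanceConfigurationINTEL( config );
  //   // ... record and submit the work to be profiled ...
  //   device.releasePerformanceConfigurationINTEL( config );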

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL(
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
      m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
                                                  Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
      m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release(
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
      m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL(
      m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL(
    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
      m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
                                             Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL(
      m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
                                          VULKAN_HPP_NAMESPACE::PerformanceValueINTEL *       pValue,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPerformanceParameterINTEL( m_device,
                                        static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
                                        reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
    Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
                                          Dispatch const &                                    d ) const
  {
    VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
    Result                                      result =
      static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device,
                                                             static_cast<VkPerformanceParameterTypeINTEL>( parameter ),
                                                             reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
    return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
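
  // Usage sketch (illustrative only; `device` is a placeholder and the default `vk` namespace is
  // assumed): with exceptions enabled the enhanced overload returns the PerformanceValueINTEL directly
  // and throws on failure, e.g.
  //
  //   vk::PerformanceValueINTEL hwCounters =
  //     device.getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL::eHwCountersSupported );
  //   // hwCounters.type tells which member of hwCounters.data holds the returned value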

  //=== VK_AMD_display_native_hdr ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
                                                     VULKAN_HPP_NAMESPACE::Bool32       localDimmingEnable,
                                                     Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkSetLocalDimmingAMD(
      m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
  }

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_imagepipe_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA(
    const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *               pAllocator,
    VULKAN_HPP_NAMESPACE::SurfaceKHR *                              pSurface,
    Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
                                         reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                         reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
                                             Optional<const AllocationCallbacks>       allocator,
                                             Dispatch const &                          d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
      m_instance,
      reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
                                                   Optional<const AllocationCallbacks>       allocator,
                                                   Dispatch const &                          d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR  surface;
    Result                            result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA(
      m_instance,
      reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_FUCHSIA*/

#if defined( VK_USE_PLATFORM_METAL_EXT )
  //=== VK_EXT_metal_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                     VULKAN_HPP_NAMESPACE::SurfaceKHR *                      pSurface,
                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateMetalSurfaceEXT( m_instance,
                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                 reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT &   createInfo,
                                     Optional<const AllocationCallbacks> allocator,
                                     Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateMetalSurfaceEXT( m_instance,
                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT &   createInfo,
                                           Optional<const AllocationCallbacks> allocator,
                                           Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateMetalSurfaceEXT( m_instance,
                                 reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>(
                                   static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_METAL_EXT*/

  //=== VK_KHR_fragment_shading_rate ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR(
    uint32_t *                                                   pFragmentShadingRateCount,
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
      m_physicalDevice,
      pFragmentShadingRateCount,
      reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
    PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
             fragmentShadingRates;
    uint32_t fragmentShadingRateCount;
    Result   result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
      {
        fragmentShadingRates.resize( fragmentShadingRateCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
          m_physicalDevice,
          &fragmentShadingRateCount,
          reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
        VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
    {
      fragmentShadingRates.resize( fragmentShadingRateCount );
    }
    return createResultValue(
      result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
  }

  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
    PhysicalDevice::getFragmentShadingRatesKHR(
      PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
      Dispatch const &                                d ) const
  {
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>
             fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator );
    uint32_t fragmentShadingRateCount;
    Result   result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
      {
        fragmentShadingRates.resize( fragmentShadingRateCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
          m_physicalDevice,
          &fragmentShadingRateCount,
          reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
        VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
    {
      fragmentShadingRates.resize( fragmentShadingRateCount );
    }
    return createResultValue(
      result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
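
  // Usage sketch (illustrative only; `physicalDevice` is a placeholder and the default `vk` namespace
  // is assumed): the enhanced overloads above run the usual count/fill loop, retrying while the driver
  // reports eIncomplete, so a caller just consumes the returned vector. With exceptions enabled:
  //
  //   auto shadingRates = physicalDevice.getFragmentShadingRatesKHR();
  //   for ( auto const & rate : shadingRates )
  //   {
  //     // rate.fragmentSize and rate.sampleCounts describe one supported shading rate
  //   }
  //
  // With VULKAN_HPP_NO_EXCEPTIONS defined, the call instead returns a ResultValue pairing the Result
  // code with the vector.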

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
    const VULKAN_HPP_NAMESPACE::Extent2D *                       pFragmentSize,
    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
                                      reinterpret_cast<const VkExtent2D *>( pFragmentSize ),
                                      reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR(
    const Extent2D &                                             fragmentSize,
    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer,
                                      reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
                                      reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
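
  // Usage sketch (illustrative only; `commandBuffer` is a placeholder): the combiner operations are
  // passed as a plain array of two elements, matching the C-style parameter above, e.g.
  //
  //   vk::Extent2D fragmentSize( 2, 2 );
  //   vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
  //                                                           vk::FragmentShadingRateCombinerOpKHR::eKeep };
  //   commandBuffer.setFragmentShadingRateKHR( fragmentSize, combinerOps );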

  //=== VK_EXT_buffer_device_address ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT(
    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceAddress>(
      d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
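
  // Usage sketch (illustrative only; `device` and `buffer` are placeholders, and `buffer` is assumed
  // to have been created with the shader-device-address usage bit):
  //
  //   vk::BufferDeviceAddressInfo addressInfo( buffer );
  //   vk::DeviceAddress address = device.getBufferAddressEXT( addressInfo );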

  //=== VK_EXT_tooling_info ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getToolPropertiesEXT( uint32_t *                                              pToolCount,
                                          VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT * pToolProperties,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
      m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( pToolProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceToolPropertiesEXTAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
    PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties;
    uint32_t                                                                               toolCount;
    Result                                                                                 result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && toolCount )
      {
        toolProperties.resize( toolCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
          m_physicalDevice,
          &toolCount,
          reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
        VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
    {
      toolProperties.resize( toolCount );
    }
    return createResultValue(
      result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
  }

  template <
    typename PhysicalDeviceToolPropertiesEXTAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type
    PhysicalDevice::getToolPropertiesEXT(
      PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties(
      physicalDeviceToolPropertiesEXTAllocator );
    uint32_t toolCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && toolCount )
      {
        toolProperties.resize( toolCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT(
          m_physicalDevice,
          &toolCount,
          reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
        VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
    {
      toolProperties.resize( toolCount );
    }
    return createResultValue(
      result, toolProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
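
  // Usage sketch (illustrative only; `physicalDevice` is a placeholder): the enhanced overloads follow
  // the same count/fill loop as the other enumerations in this header, so with exceptions enabled:
  //
  //   auto tools = physicalDevice.getToolPropertiesEXT();
  //   for ( auto const & tool : tools )
  //   {
  //     // tool.name, tool.version and tool.purposes identify one active tool (validation, profiler, ...)
  //   }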

  //=== VK_NV_cooperative_matrix ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t * pPropertyCount,
                                                      VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
      m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
    PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
  {
    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
    uint32_t                                                                           propertyCount;
    Result                                                                             result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
          m_physicalDevice,
          &propertyCount,
          reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
  }

  template <
    typename CooperativeMatrixPropertiesNVAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
    PhysicalDevice::getCooperativeMatrixPropertiesNV(
      CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const
  {
    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
      cooperativeMatrixPropertiesNVAllocator );
    uint32_t propertyCount;
    Result   result;
    do
    {
      result = static_cast<Result>(
        d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
          m_physicalDevice,
          &propertyCount,
          reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
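
  // Usage sketch (illustrative only; `physicalDevice` is a placeholder):
  //
  //   auto cooperativeMatrixProperties = physicalDevice.getCooperativeMatrixPropertiesNV();
  //   // each entry reports one supported MSize/NSize/KSize and component-type combination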

  //=== VK_NV_coverage_reduction_mode ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
    uint32_t *                                                   pCombinationCount,
    VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations,
    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
      m_physicalDevice,
      pCombinationCount,
      reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
  {
    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
    uint32_t                                                                                         combinationCount;
    Result                                                                                           result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
        m_physicalDevice, &combinationCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && combinationCount )
      {
        combinations.resize( combinationCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          m_physicalDevice,
          &combinationCount,
          reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
        VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
    {
      combinations.resize( combinationCount );
    }
    return createResultValue( result,
                              combinations,
                              VULKAN_HPP_NAMESPACE_STRING
                              "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
  }

  template <typename FramebufferMixedSamplesCombinationNVAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
      FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator,
      Dispatch const &                                d ) const
  {
    std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
      framebufferMixedSamplesCombinationNVAllocator );
    uint32_t combinationCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
        m_physicalDevice, &combinationCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && combinationCount )
      {
        combinations.resize( combinationCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          m_physicalDevice,
          &combinationCount,
          reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
        VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
    {
      combinations.resize( combinationCount );
    }
    return createResultValue( result,
                              combinations,
                              VULKAN_HPP_NAMESPACE_STRING
                              "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_EXT_full_screen_exclusive ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT(
    const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
    uint32_t *                                                  pPresentModeCount,
    VULKAN_HPP_NAMESPACE::PresentModeKHR *                      pPresentModes,
    Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
      m_physicalDevice,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
      pPresentModeCount,
      reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PresentModeKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                                Dispatch const &                      d ) const
  {
    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
    uint32_t                                             presentModeCount;
    Result                                               result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
        m_physicalDevice,
        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
        &presentModeCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
          m_physicalDevice,
          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
          &presentModeCount,
          reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValue(
      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
  }

  template <typename PresentModeKHRAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                                PresentModeKHRAllocator &             presentModeKHRAllocator,
                                                Dispatch const &                      d ) const
  {
    std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
    uint32_t                                             presentModeCount;
    Result                                               result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
        m_physicalDevice,
        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
        &presentModeCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
          m_physicalDevice,
          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
          &presentModeCount,
          reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
        VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValue(
      result, presentModes, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT(
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
#  else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT(
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
#  else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
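
  // Usage sketch (illustrative only; `device` and `swapchain` are placeholders): acquire/release
  // bracket the period in which the application holds full-screen exclusive access. Per the
  // VK_EXT_full_screen_exclusive extension, the swapchain is expected to have been created with
  // application-controlled full-screen exclusivity requested:
  //
  //   device.acquireFullScreenExclusiveModeEXT( swapchain );
  //   // ... present while exclusive ...
  //   device.releaseFullScreenExclusiveModeEXT( swapchain );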

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                             VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR *      pModes,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
      reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
    Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                             Dispatch const &                      d ) const
  {
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
    Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device,
      reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
      reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
    return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_WIN32_KHR*/

  //=== VK_EXT_headless_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                        VULKAN_HPP_NAMESPACE::SurfaceKHR *                         pSurface,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateHeadlessSurfaceEXT( m_instance,
                                    reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                    reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo,
                                        Optional<const AllocationCallbacks>  allocator,
                                        Dispatch const &                     d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateHeadlessSurfaceEXT( m_instance,
                                    reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo,
                                              Optional<const AllocationCallbacks>  allocator,
                                              Dispatch const &                     d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateHeadlessSurfaceEXT( m_instance,
                                    reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
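
  // Usage sketch (illustrative only; `instance` is a placeholder): the ...Unique variant returns a
  // UniqueHandle that destroys the surface when it goes out of scope, which is convenient for headless
  // (e.g. CI) rendering setups:
  //
  //   vk::HeadlessSurfaceCreateInfoEXT createInfo{};
  //   vk::UniqueSurfaceKHR surface = instance.createHeadlessSurfaceEXTUnique( createInfo );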

  //=== VK_KHR_buffer_device_address ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR(
    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceAddress>(
      d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR(
    const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
                                                 reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device,
                                                 reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t
                    Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR(
    const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR(
      m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
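
  // Usage sketch (illustrative only; `device`, `buffer` and `memory` are placeholders, and the buffer
  // is assumed to carry the shader-device-address usage bit):
  //
  //   vk::BufferDeviceAddressInfo addressInfo( buffer );
  //   vk::DeviceAddress address       = device.getBufferAddressKHR( addressInfo );
  //   uint64_t          bufferCapture = device.getBufferOpaqueCaptureAddressKHR( addressInfo );
  //   uint64_t          memoryCapture =
  //     device.getMemoryOpaqueCaptureAddressKHR( vk::DeviceMemoryOpaqueCaptureAddressInfo( memory ) );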

  //=== VK_EXT_line_rasterization ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t         lineStippleFactor,
                                                           uint16_t         lineStipplePattern,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
  }
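
  // Usage sketch (illustrative only; `commandBuffer` is a placeholder):
  //
  //   commandBuffer.setLineStippleEXT( 1, 0x5555 );   // factor 1, alternating on/off bit pattern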

  //=== VK_EXT_host_query_reset ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                    uint32_t                        firstQuery,
                                                    uint32_t                        queryCount,
                                                    Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }
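
  // Usage sketch (illustrative only; `device`, `queryPool` and `queryCount` are placeholders): queries
  // are reset from the host, without recording a reset command into a command buffer:
  //
  //   device.resetQueryPoolEXT( queryPool, 0, queryCount );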

  //=== VK_EXT_extended_dynamic_state ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace,
                                                         Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
                                            Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t                               viewportCount,
                                                                 const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportWithCountEXT(
      m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetViewportWithCountEXT(
      m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t                             scissorCount,
                                                                const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetScissorWithCountEXT(
      m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t                                 firstBinding,
                                                               uint32_t                                 bindingCount,
                                                               const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  bindingCount,
                                  reinterpret_cast<const VkBuffer *>( pBuffers ),
                                  reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                                  reinterpret_cast<const VkDeviceSize *>( pSizes ),
                                  reinterpret_cast<const VkDeviceSize *>( pStrides ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindVertexBuffers2EXT( uint32_t                                                   firstBinding,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
#  else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
    }
    if ( !strides.empty() && buffers.size() != strides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  buffers.size(),
                                  reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
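
  // Usage sketch (illustrative only; `commandBuffer`, `vertexBuffer`, `instanceBuffer`, `Vertex` and
  // `InstanceData` are placeholders): the ArrayProxy overload above checks that offsets (and, when
  // provided, sizes/strides) match the number of buffers, throwing a LogicError (or asserting when
  // exceptions are disabled) on a mismatch. An empty proxy for sizes results in a null pSizes pointer
  // being passed to vkCmdBindVertexBuffers2EXT:
  //
  //   std::array<vk::Buffer, 2>     buffers = { vertexBuffer, instanceBuffer };
  //   std::array<vk::DeviceSize, 2> offsets = { 0, 0 };
  //   std::array<vk::DeviceSize, 2> strides = { sizeof( Vertex ), sizeof( InstanceData ) };
  //   commandBuffer.bindVertexBuffers2EXT( 0, buffers, offsets, {}, strides );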

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                         VULKAN_HPP_NAMESPACE::StencilOp        failOp,
                                                         VULKAN_HPP_NAMESPACE::StencilOp        passOp,
                                                         VULKAN_HPP_NAMESPACE::StencilOp        depthFailOp,
                                                         VULKAN_HPP_NAMESPACE::CompareOp        compareOp,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetStencilOpEXT( m_commandBuffer,
                            static_cast<VkStencilFaceFlags>( faceMask ),
                            static_cast<VkStencilOp>( failOp ),
                            static_cast<VkStencilOp>( passOp ),
                            static_cast<VkStencilOp>( depthFailOp ),
                            static_cast<VkCompareOp>( compareOp ) );
  }
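
  // Usage sketch (illustrative only; `commandBuffer` is a placeholder): these setters replace the
  // corresponding static pipeline state for pipelines created with the matching dynamic states
  // enabled, e.g.
  //
  //   commandBuffer.setCullModeEXT( vk::CullModeFlagBits::eBack );
  //   commandBuffer.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
  //   commandBuffer.setPrimitiveTopologyEXT( vk::PrimitiveTopology::eTriangleList );
  //   commandBuffer.setDepthTestEnableEXT( VK_TRUE );
  //   commandBuffer.setDepthCompareOpEXT( vk::CompareOp::eLessOrEqual );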

  //=== VK_KHR_deferred_host_operations ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        VULKAN_HPP_NAMESPACE::DeferredOperationKHR *      pDeferredOperation,
                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDeferredOperationKHR( m_device,
                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                      reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
    Device::createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
    Result                                     result = static_cast<Result>(
      d.vkCreateDeferredOperationKHR( m_device,
                                      reinterpret_cast<const VkAllocationCallbacks *>(
                                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
    return createResultValue(
      result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
    Device::createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
    Result                                     result = static_cast<Result>(
      d.vkCreateDeferredOperationKHR( m_device,
                                      reinterpret_cast<const VkAllocationCallbacks *>(
                                        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                      reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>(
      result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR        operation,
                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDeferredOperationKHR( m_device,
                                     static_cast<VkDeferredOperationKHR>( operation ),
                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                              Optional<const AllocationCallbacks>        allocator,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDeferredOperationKHR( m_device,
                                     static_cast<VkDeferredOperationKHR>( operation ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR        operation,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDeferredOperationKHR( m_device,
                                     static_cast<VkDeferredOperationKHR>( operation ),
                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                          Optional<const AllocationCallbacks>        allocator,
                                          Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyDeferredOperationKHR( m_device,
                                     static_cast<VkDeferredOperationKHR>( operation ),
                                     reinterpret_cast<const VkAllocationCallbacks *>(
                                       static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  }

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
  {
    Result result = static_cast<Result>(
      d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR(
    VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR,
                                VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
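
  // Illustrative multi-threaded completion loop (a sketch only; assumes the default vk namespace,
  // exceptions enabled, a vk::Device `device`, and that the operation was handed to an API that accepts
  // deferred operations):
  //   auto op = device.createDeferredOperationKHRUnique();
  //   uint32_t maxThreads = device.getDeferredOperationMaxConcurrencyKHR( *op );  // hint for worker count
  //   vk::Result r;
  //   do
  //   {
  //     r = device.deferredOperationJoinKHR( *op );   // eSuccess, eThreadDoneKHR or eThreadIdleKHR
  //   } while ( r == vk::Result::eThreadIdleKHR );
  //   vk::Result status = device.getDeferredOperationResultKHR( *op );            // eSuccess or eNotReady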

  //=== VK_KHR_pipeline_executable_properties ===
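  // VK_KHR_pipeline_executable_properties exposes the executables a pipeline compiled to, together with
  // their statistics and internal representations, for tooling and debugging. The pipelineExecutableInfo
  // feature must be enabled, and statistics / internal representations are only captured for pipelines
  // created with vk::PipelineCreateFlagBits::eCaptureStatisticsKHR / eCaptureInternalRepresentationsKHR.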

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
                                                uint32_t *                                    pExecutableCount,
                                                VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                              reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
                                              pExecutableCount,
                                              reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
  {
    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
    uint32_t                                                                               executableCount;
    Result                                                                                 result;
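    // Standard two-call enumeration: query the element count, size the vector, then fetch the data;
    // retry while the implementation reports eIncomplete because the count changed between the calls.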
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
        m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && executableCount )
      {
        properties.resize( executableCount );
        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
          m_device,
          reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
          &executableCount,
          reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( executableCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
    {
      properties.resize( executableCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
  }

  template <
    typename PipelineExecutablePropertiesKHRAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR(
      const PipelineInfoKHR &                    pipelineInfo,
      PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
      Dispatch const &                           d ) const
  {
    std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
      pipelineExecutablePropertiesKHRAllocator );
    uint32_t executableCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
        m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && executableCount )
      {
        properties.resize( executableCount );
        result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR(
          m_device,
          reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
          &executableCount,
          reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( executableCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
    {
      properties.resize( executableCount );
    }
    return createResultValue(
      result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
                                                uint32_t *                                              pStatisticCount,
                                                VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR *  pStatistics,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                              reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
                                              pStatisticCount,
                                              reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo,
                                                Dispatch const &                  d ) const
  {
    std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
    uint32_t                                                                             statisticCount;
    Result                                                                               result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &statisticCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && statisticCount )
      {
        statistics.resize( statisticCount );
        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &statisticCount,
          reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
        VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
    {
      statistics.resize( statisticCount );
    }
    return createResultValue(
      result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
  }

  template <
    typename PipelineExecutableStatisticKHRAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR(
      const PipelineExecutableInfoKHR &         executableInfo,
      PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
      Dispatch const &                          d ) const
  {
    std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
      pipelineExecutableStatisticKHRAllocator );
    uint32_t statisticCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &statisticCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && statisticCount )
      {
        statistics.resize( statisticCount );
        result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &statisticCount,
          reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
        VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
    {
      statistics.resize( statisticCount );
    }
    return createResultValue(
      result, statistics, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
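
  // Illustrative query (a sketch; assumes exceptions are enabled, so the enhanced overloads return the
  // vectors directly, and a vk::Pipeline `pipeline` created with the capture flags mentioned above):
  //   auto executables = device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR( pipeline ) );
  //   for ( uint32_t i = 0; i < executables.size(); ++i )
  //   {
  //     auto statistics = device.getPipelineExecutableStatisticsKHR( vk::PipelineExecutableInfoKHR( pipeline, i ) );
  //   }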

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR(
    const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR *             pExecutableInfo,
    uint32_t *                                                          pInternalRepresentationCount,
    VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
    Dispatch const &                                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
      m_device,
      reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
      pInternalRepresentationCount,
      reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
                                         PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo,
                                                             Dispatch const &                  d ) const
  {
    std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
             internalRepresentations;
    uint32_t internalRepresentationCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &internalRepresentationCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
        VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
    {
      internalRepresentations.resize( internalRepresentationCount );
    }
    return createResultValue( result,
                              internalRepresentations,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
  }

  template <
    typename PipelineExecutableInternalRepresentationKHRAllocator,
    typename Dispatch,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value,
                            int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,
                                         PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR(
      const PipelineExecutableInfoKHR &                      executableInfo,
      PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
      Dispatch const &                                       d ) const
  {
    std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
             internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
    uint32_t internalRepresentationCount;
    Result   result;
    do
    {
      result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &internalRepresentationCount,
        nullptr ) );
      if ( ( result == Result::eSuccess ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
        VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
    {
      internalRepresentations.resize( internalRepresentationCount );
    }
    return createResultValue( result,
                              internalRepresentations,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_NV_device_generated_commands ===
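  // VK_NV_device_generated_commands lets the device generate its own draw/dispatch work: an
  // IndirectCommandsLayoutNV describes the token stream, getGeneratedCommandsMemoryRequirementsNV sizes
  // the preprocess buffer, and the assembled GeneratedCommandsInfoNV is optionally preprocessed and then
  // executed with CommandBuffer::executeGeneratedCommandsNV.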

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV(
    const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                             pMemoryRequirements,
    Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetGeneratedCommandsMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
      reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
                                         Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetGeneratedCommandsMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
                                         Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...>                  structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements =
      structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetGeneratedCommandsMemoryRequirementsNV(
      m_device,
      reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
      reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
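
  // The StructureChain overload above returns pNext-extended results in one call, e.g. (a sketch,
  // assuming a filled vk::GeneratedCommandsMemoryRequirementsInfoNV `info`; whether an extension
  // structure is actually populated for this query depends on the implementation):
  //   auto chain = device.getGeneratedCommandsMemoryRequirementsNV<vk::MemoryRequirements2,
  //                                                                vk::MemoryDedicatedRequirements>( info );
  //   vk::MemoryRequirements2 requirements = chain.get<vk::MemoryRequirements2>();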

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV(
    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
    Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPreprocessGeneratedCommandsNV(
      m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo,
                                                  Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPreprocessGeneratedCommandsNV(
      m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV(
    VULKAN_HPP_NAMESPACE::Bool32                          isPreprocessed,
    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
    Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
                                       static_cast<VkBool32>( isPreprocessed ),
                                       reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32    isPreprocessed,
                                               const GeneratedCommandsInfoNV & generatedCommandsInfo,
                                               Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer,
                                       static_cast<VkBool32>( isPreprocessed ),
                                       reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                              VULKAN_HPP_NAMESPACE::Pipeline          pipeline,
                                              uint32_t                                groupIndex,
                                              Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer,
                                      static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                      static_cast<VkPipeline>( pipeline ),
                                      groupIndex );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV(
    const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV *                 pIndirectCommandsLayout,
    Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateIndirectCommandsLayoutNV( m_device,
                                          reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                          reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
    Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo,
                                            Optional<const AllocationCallbacks>        allocator,
                                            Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
    Result                                         result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
      m_device,
      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
    return createResultValue(
      result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
    Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo,
                                                  Optional<const AllocationCallbacks>        allocator,
                                                  Dispatch const &                           d ) const
  {
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
    Result                                         result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV(
      m_device,
      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
    ObjectDestroy<Device, Dispatch>                deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
      result,
      indirectCommandsLayout,
      VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique",
      deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV    indirectCommandsLayout,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyIndirectCommandsLayoutNV( m_device,
                                         static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
                                             Optional<const AllocationCallbacks>            allocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyIndirectCommandsLayoutNV(
      m_device,
      static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV    indirectCommandsLayout,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyIndirectCommandsLayoutNV( m_device,
                                         static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout,
                                          Optional<const AllocationCallbacks>            allocator,
                                          Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyIndirectCommandsLayoutNV(
      m_device,
      static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_EXT_private_data ===
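  // VK_EXT_private_data attaches a caller-chosen 64-bit value to any Vulkan object through a
  // PrivateDataSlotEXT, avoiding side tables keyed by object handle.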

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT * pCreateInfo,
                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                      VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT *                 pPrivateDataSlot,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreatePrivateDataSlotEXT( m_device,
                                    reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( pCreateInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                    reinterpret_cast<VkPrivateDataSlotEXT *>( pPrivateDataSlot ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type
    Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo,
                                      Optional<const AllocationCallbacks>  allocator,
                                      Dispatch const &                     d ) const
  {
    VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
    Result                                   result = static_cast<Result>(
      d.vkCreatePrivateDataSlotEXT( m_device,
                                    reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
    return createResultValue(
      result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type
    Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo,
                                            Optional<const AllocationCallbacks>  allocator,
                                            Dispatch const &                     d ) const
  {
    VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
    Result                                   result = static_cast<Result>(
      d.vkCreatePrivateDataSlotEXT( m_device,
                                    reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>(
      result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT          privateDataSlot,
                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                       Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPrivateDataSlotEXT( m_device,
                                   static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                                                            Optional<const AllocationCallbacks>      allocator,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPrivateDataSlotEXT( m_device,
                                   static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT          privateDataSlot,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPrivateDataSlotEXT( m_device,
                                   static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                                          Optional<const AllocationCallbacks>      allocator,
                                          Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkDestroyPrivateDataSlotEXT( m_device,
                                   static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
                                   reinterpret_cast<const VkAllocationCallbacks *>(
                                     static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType         objectType,
                               uint64_t                                 objectHandle,
                               VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                               uint64_t                                 data,
                               Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
                                                       static_cast<VkObjectType>( objectType ),
                                                       objectHandle,
                                                       static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
                                                       data ) );
  }
#else
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType,
                               uint64_t objectHandle,
                               VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                               uint64_t data,
                               Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkSetPrivateDataEXT( m_device,
                                                                static_cast<VkObjectType>( objectType ),
                                                                objectHandle,
                                                                static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
                                                                data ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType         objectType,
                                                    uint64_t                                 objectHandle,
                                                    VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                                                    uint64_t *                               pData,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPrivateDataEXT( m_device,
                           static_cast<VkObjectType>( objectType ),
                           objectHandle,
                           static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
                           pData );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
                                         Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType         objectType,
                               uint64_t                                 objectHandle,
                               VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot,
                               Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    uint64_t data;
    d.vkGetPrivateDataEXT( m_device,
                           static_cast<VkObjectType>( objectType ),
                           objectHandle,
                           static_cast<VkPrivateDataSlotEXT>( privateDataSlot ),
                           &data );
    return data;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
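
  // Illustrative use (a sketch; assumes exceptions are enabled, a 64-bit build where non-dispatchable
  // handles are pointers, and an existing vk::Image `image`):
  //   auto slot = device.createPrivateDataSlotEXTUnique( vk::PrivateDataSlotCreateInfoEXT{} );
  //   uint64_t handle = reinterpret_cast<uint64_t>( static_cast<VkImage>( image ) );
  //   device.setPrivateDataEXT( vk::ObjectType::eImage, handle, *slot, 42 );
  //   uint64_t value = device.getPrivateDataEXT( vk::ObjectType::eImage, handle, *slot );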

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_encode_queue ===
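  // Provisional API: these entry points are only compiled when VK_ENABLE_BETA_EXTENSIONS is defined and
  // may change incompatibly between header releases.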

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VideoEncodeInfoKHR & encodeInfo,
                                                        Dispatch const &           d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_KHR_synchronization2 ===
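  // VK_KHR_synchronization2 reworks the synchronization commands: stage and access masks widen to 64 bits
  // (PipelineStageFlags2KHR / AccessFlags2KHR), and barriers, events and queue submission are expressed
  // through extensible structs (DependencyInfoKHR, SubmitInfo2KHR).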

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event                     event,
                                                      const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetEvent2KHR( m_commandBuffer,
                         static_cast<VkEvent>( event ),
                         reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
                                                      const DependencyInfoKHR &   dependencyInfo,
                                                      Dispatch const &            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetEvent2KHR( m_commandBuffer,
                         static_cast<VkEvent>( event ),
                         reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event                  event,
                                                        VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stageMask,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResetEvent2KHR(
      m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2KHR>( stageMask ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::waitEvents2KHR( uint32_t                                        eventCount,
                                   const VULKAN_HPP_NAMESPACE::Event *             pEvents,
                                   const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfos,
                                   Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWaitEvents2KHR( m_commandBuffer,
                           eventCount,
                           reinterpret_cast<const VkEvent *>( pEvents ),
                           reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfos ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &             events,
                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfoKHR> const & dependencyInfos,
                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
#  else
    if ( events.size() != dependencyInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING
                        "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/

    d.vkCmdWaitEvents2KHR( m_commandBuffer,
                           events.size(),
                           reinterpret_cast<const VkEvent *>( events.data() ),
                           reinterpret_cast<const VkDependencyInfoKHR *>( dependencyInfos.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfoKHR * pDependencyInfo,
                                        Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( pDependencyInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const DependencyInfoKHR & dependencyInfo,
                                                             Dispatch const &          d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfoKHR *>( &dependencyInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
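
  // Illustrative image layout transition (a sketch; handles and field values are placeholders):
  //   vk::ImageMemoryBarrier2KHR barrier{};
  //   barrier.setSrcStageMask( vk::PipelineStageFlagBits2KHR::eColorAttachmentOutput )
  //          .setSrcAccessMask( vk::AccessFlagBits2KHR::eColorAttachmentWrite )
  //          .setDstStageMask( vk::PipelineStageFlagBits2KHR::eTransfer )
  //          .setDstAccessMask( vk::AccessFlagBits2KHR::eTransferRead )
  //          .setOldLayout( vk::ImageLayout::eColorAttachmentOptimal )
  //          .setNewLayout( vk::ImageLayout::eTransferSrcOptimal )
  //          .setImage( image )
  //          .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
  //   vk::DependencyInfoKHR dependencyInfo{};
  //   dependencyInfo.setImageMemoryBarrierCount( 1 ).setPImageMemoryBarriers( &barrier );
  //   commandBuffer.pipelineBarrier2KHR( dependencyInfo );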

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
                                                            VULKAN_HPP_NAMESPACE::QueryPool              queryPool,
                                                            uint32_t                                     query,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteTimestamp2KHR(
      m_commandBuffer, static_cast<VkPipelineStageFlags2KHR>( stage ), static_cast<VkQueryPool>( queryPool ), query );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Queue::submit2KHR( uint32_t                                     submitCount,
                       const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR * pSubmits,
                       VULKAN_HPP_NAMESPACE::Fence                  fence,
                       Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkQueueSubmit2KHR(
      m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2KHR *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Queue::submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2KHR> const & submits,
                       VULKAN_HPP_NAMESPACE::Fence                                    fence,
                       Dispatch const &                                               d ) const
  {
    Result result =
      static_cast<Result>( d.vkQueueSubmit2KHR( m_queue,
                                                submits.size(),
                                                reinterpret_cast<const VkSubmitInfo2KHR *>( submits.data() ),
                                                static_cast<VkFence>( fence ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR stage,
                                                               VULKAN_HPP_NAMESPACE::Buffer                 dstBuffer,
                                                               VULKAN_HPP_NAMESPACE::DeviceSize             dstOffset,
                                                               uint32_t                                     marker,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdWriteBufferMarker2AMD( m_commandBuffer,
                                  static_cast<VkPipelineStageFlags2KHR>( stage ),
                                  static_cast<VkBuffer>( dstBuffer ),
                                  static_cast<VkDeviceSize>( dstOffset ),
                                  marker );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t *                                pCheckpointDataCount,
                                                      VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetQueueCheckpointData2NV(
      m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename CheckpointData2NVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
                                         Queue::getCheckpointData2NV( Dispatch const & d ) const
  {
    std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
    uint32_t                                                   checkpointDataCount;
    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointData2NV(
      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    return checkpointData;
  }

  template <typename CheckpointData2NVAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, CheckpointData2NV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointData2NV, CheckpointData2NVAllocator>
                                         Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
  {
    std::vector<CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
    uint32_t                                                   checkpointDataCount;
    d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointData2NV(
      m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    return checkpointData;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  //=== VK_NV_fragment_shading_rate_enums ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV(
    VULKAN_HPP_NAMESPACE::FragmentShadingRateNV                  shadingRate,
    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
    Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer,
                                         static_cast<VkFragmentShadingRateNV>( shadingRate ),
                                         reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  }

  //=== VK_KHR_copy_commands2 ===
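  // VK_KHR_copy_commands2 replaces the long parameter lists of the copy/blit/resolve commands with
  // extensible *Info2KHR structs, so additional per-copy information can be chained through pNext.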

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR * pCopyBufferInfo,
                                   Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( pCopyBufferInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo,
                                                        Dispatch const &           d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( &copyBufferInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
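
  // Illustrative usage sketch, not part of the generated API: recording a buffer-to-buffer copy
  // through VK_KHR_copy_commands2. `commandBuffer`, `srcBuffer`, `dstBuffer` and `size` are
  // assumed, application-provided values; the other *2KHR copy commands below follow the same
  // pattern with their respective Info2KHR structures:
  //
  //   vk::BufferCopy2KHR     region( 0 /*srcOffset*/, 0 /*dstOffset*/, size );
  //   vk::CopyBufferInfo2KHR copyInfo( srcBuffer, dstBuffer, region );
  //   commandBuffer.copyBuffer2KHR( copyInfo );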

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR * pCopyImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( pCopyImageInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo,
                                                       Dispatch const &          d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( &copyImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR(
    const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR * pCopyBufferToImageInfo,
    Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
                                  reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( pCopyBufferToImageInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer,
                                  reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( &copyBufferToImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR(
    const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR * pCopyImageToBufferInfo,
    Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
                                  reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( pCopyImageToBufferInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer,
                                  reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( &copyImageToBufferInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR * pBlitImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( pBlitImageInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo,
                                                       Dispatch const &          d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( &blitImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR * pResolveImageInfo,
                                     Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( pResolveImageInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo,
                                                          Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( &resolveImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_NV_acquire_winrt_display ===

#  ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV(
    VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  }
#  else
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    Result result =
      static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         PhysicalDevice::getWinrtDisplayNV( uint32_t                           deviceRelativeId,
                                       VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
                                       Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
    PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    Result                           result = static_cast<Result>(
      d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
    return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
    PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    Result                           result = static_cast<Result>(
      d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
    ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>(
      result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif     /*VK_USE_PLATFORM_WIN32_KHR*/
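
  // Illustrative usage sketch, not part of the generated API (Win32 only): looking up a WinRT
  // display by its device-relative id and acquiring it for exclusive use. `physicalDevice` and
  // `deviceRelativeId` are assumed values; with exceptions enabled, the enhanced-mode wrappers
  // above throw on failure:
  //
  //   vk::DisplayKHR display = physicalDevice.getWinrtDisplayNV( deviceRelativeId );
  //   physicalDevice.acquireWinrtDisplayNV( display );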

#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
  //=== VK_EXT_directfb_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                        VULKAN_HPP_NAMESPACE::SurfaceKHR *                         pSurface,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateDirectFBSurfaceEXT( m_instance,
                                    reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                    reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo,
                                        Optional<const AllocationCallbacks>  allocator,
                                        Dispatch const &                     d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateDirectFBSurfaceEXT( m_instance,
                                    reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo,
                                              Optional<const AllocationCallbacks>  allocator,
                                              Dispatch const &                     d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateDirectFBSurfaceEXT( m_instance,
                                    reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
                                    reinterpret_cast<const VkAllocationCallbacks *>(
                                      static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                    reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT(
    uint32_t queueFamilyIndex, IDirectFB * dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Bool32>(
      d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT(
    uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_DIRECTFB_EXT*/
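
  // Illustrative usage sketch, not part of the generated API (DirectFB only): creating a
  // presentable surface from application-provided `dfb` (IDirectFB *) and `dfbSurface`
  // (IDirectFBSurface *) pointers, assuming a valid vk::Instance `instance`:
  //
  //   vk::DirectFBSurfaceCreateInfoEXT createInfo( {}, dfb, dfbSurface );
  //   vk::SurfaceKHR                   surface = instance.createDirectFBSurfaceEXT( createInfo );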

  //=== VK_KHR_ray_tracing_pipeline ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR(
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
    uint32_t                                                    width,
    uint32_t                                                    height,
    uint32_t                                                    depth,
    Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysKHR( m_commandBuffer,
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
                         width,
                         height,
                         depth );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                                                      const StridedDeviceAddressRegionKHR & missShaderBindingTable,
                                                      const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                                                      const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                                                      uint32_t                              width,
                                                      uint32_t                              height,
                                                      uint32_t                              depth,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysKHR( m_commandBuffer,
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
                         width,
                         height,
                         depth );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
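
  // Illustrative usage sketch, not part of the generated API: dispatching a ray tracing pipeline
  // with the enhanced-mode wrapper above. The shader binding table addresses, strides and sizes
  // (`raygenAddress`, `handleSizeAligned`, `missAddress`, `missTableSize`, `hitAddress`,
  // `hitTableSize`) are assumed to have been laid out by the application; an empty region is
  // passed for the callable table:
  //
  //   vk::StridedDeviceAddressRegionKHR raygenSbt( raygenAddress, handleSizeAligned, handleSizeAligned );
  //   vk::StridedDeviceAddressRegionKHR missSbt( missAddress, handleSizeAligned, missTableSize );
  //   vk::StridedDeviceAddressRegionKHR hitSbt( hitAddress, handleSizeAligned, hitTableSize );
  //   vk::StridedDeviceAddressRegionKHR callableSbt{};
  //   commandBuffer.traceRaysKHR( raygenSbt, missSbt, hitSbt, callableSbt, width, height, 1 );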

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                          VULKAN_HPP_NAMESPACE::PipelineCache        pipelineCache,
                                          uint32_t                                   createInfoCount,
                                          const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks *             pAllocator,
                                          VULKAN_HPP_NAMESPACE::Pipeline *                              pPipelines,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateRayTracingPipelinesKHR( m_device,
                                        static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                        static_cast<VkPipelineCache>( pipelineCache ),
                                        createInfoCount,
                                        reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                        reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
                                         Device::createRayTracingPipelinesKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                      deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                                             pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks>                                             allocator,
      Dispatch const &                                                                d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    Result                                   result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue( result,
                              pipelines,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename PipelineAllocator,
            typename Dispatch,
            typename B,
            typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>>
                                         Device::createRayTracingPipelinesKHR(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                      deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                                             pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks>                                             allocator,
      PipelineAllocator &                                                             pipelineAllocator,
      Dispatch const &                                                                d ) const
  {
    std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    Result                                   result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    return createResultValue( result,
                              pipelines,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline>
                                         Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                         VULKAN_HPP_NAMESPACE::PipelineCache        pipelineCache,
                                         const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
                                         Optional<const AllocationCallbacks>                           allocator,
                                         Dispatch const &                                              d ) const
  {
    Pipeline pipeline;
    Result   result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesKHR( m_device,
                                        static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                        static_cast<VkPipelineCache>( pipelineCache ),
                                        1,
                                        reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    return createResultValue( result,
                              pipeline,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                                         Device::createRayTracingPipelinesKHRUnique(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                      deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                                             pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks>                                             allocator,
      Dispatch const &                                                                d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    std::vector<Pipeline>                                            pipelines( createInfos.size() );
    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue( result,
                              std::move( uniquePipelines ),
                              VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <
    typename Dispatch,
    typename PipelineAllocator,
    typename B,
    typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>>
                                         Device::createRayTracingPipelinesKHRUnique(
      VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                      deferredOperation,
      VULKAN_HPP_NAMESPACE::PipelineCache                                             pipelineCache,
      ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
      Optional<const AllocationCallbacks>                                             allocator,
      PipelineAllocator &                                                             pipelineAllocator,
      Dispatch const &                                                                d ) const
  {
    std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    std::vector<Pipeline>                                            pipelines( createInfos.size() );
    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>(
        static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
    if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) ||
         ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
    {
      uniquePipelines.reserve( createInfos.size() );
      ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
      for ( size_t i = 0; i < createInfos.size(); i++ )
      {
        uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
      }
    }
    return createResultValue( result,
                              std::move( uniquePipelines ),
                              VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
                              { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                                VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  }

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>>
                                         Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
                                               VULKAN_HPP_NAMESPACE::PipelineCache        pipelineCache,
                                               const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
                                               Optional<const AllocationCallbacks>                           allocator,
                                               Dispatch const &                                              d ) const
  {
    Pipeline pipeline;
    Result   result = static_cast<Result>(
      d.vkCreateRayTracingPipelinesKHR( m_device,
                                        static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                        static_cast<VkPipelineCache>( pipelineCache ),
                                        1,
                                        reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
                                        reinterpret_cast<const VkAllocationCallbacks *>(
                                          static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                        reinterpret_cast<VkPipeline *>( &pipeline ) ) );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    return createResultValue<Pipeline, Dispatch>( result,
                                                  pipeline,
                                                  VULKAN_HPP_NAMESPACE_STRING
                                                  "::Device::createRayTracingPipelineKHRUnique",
                                                  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                                                    VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                                                    VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                                                    VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT },
                                                  deleter );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
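
  // Illustrative usage sketch, not part of the generated API: creating a single ray tracing
  // pipeline without a deferred operation. `createInfo` is an assumed, fully populated
  // vk::RayTracingPipelineCreateInfoKHR and `pipelineCache` an optional vk::PipelineCache. The
  // wrapper returns a ResultValue because the call has several success codes (for example
  // eOperationDeferredKHR and ePipelineCompileRequiredEXT):
  //
  //   vk::ResultValue<vk::Pipeline> rv = device.createRayTracingPipelineKHR( nullptr, pipelineCache, createInfo );
  //   if ( rv.result == vk::Result::eSuccess )
  //   {
  //     vk::Pipeline rayTracingPipeline = rv.value;
  //   }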

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                uint32_t                       firstGroup,
                                                uint32_t                       groupCount,
                                                size_t                         dataSize,
                                                void *                         pData,
                                                Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR(
      VULKAN_HPP_NAMESPACE::Pipeline pipeline,
      uint32_t                       firstGroup,
      uint32_t                       groupCount,
      ArrayProxy<T> const &          data,
      Dispatch const &               d ) const
  {
    Result result =
      static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
                                                                   static_cast<VkPipeline>( pipeline ),
                                                                   firstGroup,
                                                                   groupCount,
                                                                   data.size() * sizeof( T ),
                                                                   reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
  }

  template <typename T, typename Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
                                          Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                uint32_t                       firstGroup,
                                                uint32_t                       groupCount,
                                                size_t                         dataSize,
                                                Dispatch const &               d ) const
  {
    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
    Result                    result =
      static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
                                                                   static_cast<VkPipeline>( pipeline ),
                                                                   firstGroup,
                                                                   groupCount,
                                                                   data.size() * sizeof( T ),
                                                                   reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
  }

  template <typename T, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
                                          Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                               uint32_t                       firstGroup,
                                               uint32_t                       groupCount,
                                               Dispatch const &               d ) const
  {
    T      data;
    Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device,
                                                                                 static_cast<VkPipeline>( pipeline ),
                                                                                 firstGroup,
                                                                                 groupCount,
                                                                                 sizeof( T ),
                                                                                 reinterpret_cast<void *>( &data ) ) );
    return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
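
  // Illustrative usage sketch, not part of the generated API: reading back the shader group
  // handles used to build a shader binding table. `groupCount` and `handleSize` (taken from
  // PhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleSize) are assumed values:
  //
  //   std::vector<uint8_t> handles = device.getRayTracingShaderGroupHandlesKHR<uint8_t>(
  //     rayTracingPipeline, 0, groupCount, groupCount * handleSize );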

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                             uint32_t                       firstGroup,
                                                             uint32_t                       groupCount,
                                                             size_t                         dataSize,
                                                             void *                         pData,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T, typename Dispatch>
  VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
      VULKAN_HPP_NAMESPACE::Pipeline pipeline,
      uint32_t                       firstGroup,
      uint32_t                       groupCount,
      ArrayProxy<T> const &          data,
      Dispatch const &               d ) const
  {
    Result result = static_cast<Result>(
      d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
                                                           static_cast<VkPipeline>( pipeline ),
                                                           firstGroup,
                                                           groupCount,
                                                           data.size() * sizeof( T ),
                                                           reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue( result,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
  }

  template <typename T, typename Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T, Allocator>>::type
                                          Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                             uint32_t                       firstGroup,
                                                             uint32_t                       groupCount,
                                                             size_t                         dataSize,
                                                             Dispatch const &               d ) const
  {
    VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
    std::vector<T, Allocator> data( dataSize / sizeof( T ) );
    Result                    result = static_cast<Result>(
      d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
                                                           static_cast<VkPipeline>( pipeline ),
                                                           firstGroup,
                                                           groupCount,
                                                           data.size() * sizeof( T ),
                                                           reinterpret_cast<void *>( data.data() ) ) );
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
  }

  template <typename T, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type
                                          Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                            uint32_t                       firstGroup,
                                                            uint32_t                       groupCount,
                                                            Dispatch const &               d ) const
  {
    T      data;
    Result result =
      static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device,
                                                                                static_cast<VkPipeline>( pipeline ),
                                                                                firstGroup,
                                                                                groupCount,
                                                                                sizeof( T ),
                                                                                reinterpret_cast<void *>( &data ) ) );
    return createResultValue(
      result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR(
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
    VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
    Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysIndirectKHR(
      m_commandBuffer,
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
      static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                                         const StridedDeviceAddressRegionKHR & missShaderBindingTable,
                                         const StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                                         const StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                                         VULKAN_HPP_NAMESPACE::DeviceAddress   indirectDeviceAddress,
                                         Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdTraceRaysIndirectKHR(
      m_commandBuffer,
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
      reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
      static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceSize
                    Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline             pipeline,
                                                  uint32_t                                   group,
                                                  VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR(
      m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t         pipelineStackSize,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
  }
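
  // Illustrative usage sketch, not part of the generated API: querying the stack size of one
  // shader group and setting an application-computed pipeline stack size before tracing rays.
  // `rayTracingPipeline`, `raygenGroupIndex` and `computedStackSize` are assumed values:
  //
  //   vk::DeviceSize raygenStack = device.getRayTracingShaderGroupStackSizeKHR(
  //     rayTracingPipeline, raygenGroupIndex, vk::ShaderGroupShaderKHR::eGeneral );
  //   commandBuffer.setRayTracingPipelineStackSizeKHR( static_cast<uint32_t>( computedStackSize ) );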

  //=== VK_EXT_vertex_input_dynamic_state ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
    uint32_t                                                          vertexBindingDescriptionCount,
    const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT *   pVertexBindingDescriptions,
    uint32_t                                                          vertexAttributeDescriptionCount,
    const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetVertexInputEXT(
      m_commandBuffer,
      vertexBindingDescriptionCount,
      reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
      vertexAttributeDescriptionCount,
      reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
    ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const &   vertexBindingDescriptions,
    ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
    Dispatch const &                                                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetVertexInputEXT(
      m_commandBuffer,
      vertexBindingDescriptions.size(),
      reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
      vertexAttributeDescriptions.size(),
      reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
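
  // Illustrative usage sketch, not part of the generated API: specifying the vertex input layout
  // dynamically. `Vertex` is an assumed application vertex type; the bound pipeline must have been
  // created with the vertex input state declared dynamic:
  //
  //   vk::VertexInputBindingDescription2EXT   binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex, 1 );
  //   vk::VertexInputAttributeDescription2EXT attribute( 0, 0, vk::Format::eR32G32B32Sfloat, offsetof( Vertex, position ) );
  //   commandBuffer.setVertexInputEXT( binding, attribute );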

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_external_memory ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandleFUCHSIA(
    const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
    zx_handle_t *                                                  pZirconHandle,
    Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
                                          Device::getMemoryZirconHandleFUCHSIA( const MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
                                          Dispatch const &                         d ) const
  {
    zx_handle_t zirconHandle;
    Result      result = static_cast<Result>( d.vkGetMemoryZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle ) );
    return createResultValue(
      result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryZirconHandlePropertiesFUCHSIA(
    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits      handleType,
    zx_handle_t                                                 zirconHandle,
    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
    Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      zirconHandle,
      reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
    Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                                    zx_handle_t                                            zirconHandle,
                                                    Dispatch const &                                       d ) const
  {
    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
    Result result = static_cast<Result>( d.vkGetMemoryZirconHandlePropertiesFUCHSIA(
      m_device,
      static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
      zirconHandle,
      reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) ) );
    return createResultValue( result,
                              memoryZirconHandleProperties,
                              VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_FUCHSIA*/
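
  // Illustrative usage sketch, not part of the generated API (Fuchsia only): exporting a device
  // memory allocation as a Zircon VMO handle. `memory` is an assumed vk::DeviceMemory allocation
  // created with the matching export handle type:
  //
  //   vk::MemoryGetZirconHandleInfoFUCHSIA getInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA );
  //   zx_handle_t                          vmo = device.getMemoryZirconHandleFUCHSIA( getInfo );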

#if defined( VK_USE_PLATFORM_FUCHSIA )
  //=== VK_FUCHSIA_external_semaphore ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
    const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo,
    Dispatch const &                                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device,
      reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
                                          Device::importSemaphoreZirconHandleFUCHSIA(
      const ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, Dispatch const & d ) const
  {
    Result result = static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device,
      reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) ) );
    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreZirconHandleFUCHSIA(
    const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
    zx_handle_t *                                                     pZirconHandle,
    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
      m_device,
      reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ),
      pZirconHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
                                          Device::getSemaphoreZirconHandleFUCHSIA( const SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo,
                                             Dispatch const &                            d ) const
  {
    zx_handle_t zirconHandle;
    Result      result = static_cast<Result>( d.vkGetSemaphoreZirconHandleFUCHSIA(
      m_device,
      reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ),
      &zirconHandle ) );
    return createResultValue(
      result, zirconHandle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_FUCHSIA*/
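
  // Illustrative usage sketch, not part of the generated API (Fuchsia only): importing a Zircon
  // event handle into an existing semaphore. `semaphore` and `zirconEvent` are assumed,
  // application-provided values:
  //
  //   vk::ImportSemaphoreZirconHandleInfoFUCHSIA importInfo(
  //     semaphore, {}, vk::ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA, zirconEvent );
  //   device.importSemaphoreZirconHandleFUCHSIA( importInfo );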

  //=== VK_EXT_extended_dynamic_state2 ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t         patchControlPoints,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
                                                  Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp,
                                                       Dispatch const &              d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
  }

  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
                                                 Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
  }
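
  // Illustrative usage sketch, not part of the generated API: overriding a few of the
  // VK_EXT_extended_dynamic_state2 states on a command buffer whose bound pipeline declared them
  // dynamic:
  //
  //   commandBuffer.setPatchControlPointsEXT( 3 );
  //   commandBuffer.setRasterizerDiscardEnableEXT( VK_FALSE );
  //   commandBuffer.setPrimitiveRestartEnableEXT( VK_FALSE );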

#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  //=== VK_QNX_screen_surface ===

  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
                                         Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks *        pAllocator,
                                      VULKAN_HPP_NAMESPACE::SurfaceKHR *                       pSurface,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>(
      d.vkCreateScreenSurfaceQNX( m_instance,
                                  reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                  reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createScreenSurfaceQNX( const ScreenSurfaceCreateInfoQNX &  createInfo,
                                      Optional<const AllocationCallbacks> allocator,
                                      Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateScreenSurfaceQNX( m_instance,
                                  reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE
    typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createScreenSurfaceQNXUnique( const ScreenSurfaceCreateInfoQNX &  createInfo,
                                            Optional<const AllocationCallbacks> allocator,
                                            Dispatch const &                    d ) const
  {
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    Result                           result = static_cast<Result>(
      d.vkCreateScreenSurfaceQNX( m_instance,
                                  reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
                                  reinterpret_cast<const VkAllocationCallbacks *>(
                                    static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                  reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
    ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
    return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>(
      result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique", deleter );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
    uint32_t queueFamilyIndex, struct _screen_window * window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Bool32>(
      d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX(
    uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Bool32>(
      d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_SCREEN_QNX*/
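
  // On QNX builds (VK_USE_PLATFORM_SCREEN_QNX defined), the wrappers above create a surface for a
  // QNX screen window. A minimal sketch, assuming enhanced mode, smart handles, the default "vk"
  // namespace, and an already created _screen_context / _screen_window pair (screenContext and
  // screenWindow are placeholder names):
  //
  //   vk::ScreenSurfaceCreateInfoQNX createInfo( {}, screenContext, screenWindow );
  //   vk::UniqueSurfaceKHR surface = instance.createScreenSurfaceQNXUnique( createInfo );
  //
  // The throwing overload returns the SurfaceKHR directly; with exceptions disabled, the returned
  // ResultValue has to be inspected instead.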

  //=== VK_EXT_color_write_enable ===

  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t                             attachmentCount,
                                                                const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetColorWriteEnableEXT(
      m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkCmdSetColorWriteEnableEXT(
      m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
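
  // The ArrayProxy overload above accepts the color-write enables as a braced list, std::array,
  // or std::vector. A minimal sketch, assuming the default "vk" namespace and
  // VK_EXT_color_write_enable enabled on the device:
  //
  //   commandBuffer.setColorWriteEnableEXT( { VK_TRUE, VK_TRUE, VK_FALSE } );
  //
  // The StructExtends specializations that follow record which structures may legally appear in
  // the pNext chain of another structure; StructureChain checks StructExtends<X, Y>::value at
  // compile time to reject invalid chains. As a sketch (again assuming the default namespace),
  // querying Vulkan 1.2 features through a chain could look like:
  //
  //   vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain;
  //   physicalDevice.getFeatures2( &chain.get<vk::PhysicalDeviceFeatures2>() );
  //   const auto & features12 = chain.get<vk::PhysicalDeviceVulkan12Features>();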
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  template <>
  struct StructExtends<AndroidHardwareBufferFormatPropertiesANDROID, AndroidHardwareBufferPropertiesANDROID>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  template <>
  struct StructExtends<AndroidHardwareBufferUsageANDROID, ImageFormatProperties2>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  template <>
  struct StructExtends<AttachmentDescriptionStencilLayout, AttachmentDescription2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<AttachmentReferenceStencilLayout, AttachmentReference2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<BindBufferMemoryDeviceGroupInfo, BindBufferMemoryInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<BindImageMemoryDeviceGroupInfo, BindImageMemoryInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<BindImageMemorySwapchainInfoKHR, BindImageMemoryInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<BindImagePlaneMemoryInfo, BindImageMemoryInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<BufferDeviceAddressCreateInfoEXT, BufferCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<BufferOpaqueCaptureAddressCreateInfo, BufferCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<CommandBufferInheritanceConditionalRenderingInfoEXT, CommandBufferInheritanceInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<CommandBufferInheritanceRenderPassTransformInfoQCOM, CommandBufferInheritanceInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<CommandBufferInheritanceViewportScissorInfoNV, CommandBufferInheritanceInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<CopyCommandTransformInfoQCOM, BufferImageCopy2KHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<CopyCommandTransformInfoQCOM, ImageBlit2KHR>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<D3D12FenceSubmitInfoKHR, SubmitInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  template <>
  struct StructExtends<DebugReportCallbackCreateInfoEXT, InstanceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DebugUtilsMessengerCreateInfoEXT, InstanceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DedicatedAllocationBufferCreateInfoNV, BufferCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DedicatedAllocationImageCreateInfoNV, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DedicatedAllocationMemoryAllocateInfoNV, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DescriptorPoolInlineUniformBlockCreateInfoEXT, DescriptorPoolCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DescriptorSetLayoutBindingFlagsCreateInfo, DescriptorSetLayoutCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DescriptorSetVariableDescriptorCountAllocateInfo, DescriptorSetAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DescriptorSetVariableDescriptorCountLayoutSupport, DescriptorSetLayoutSupport>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceDeviceMemoryReportCreateInfoEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceDiagnosticsConfigCreateInfoNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceGroupBindSparseInfo, BindSparseInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceGroupCommandBufferBeginInfo, CommandBufferBeginInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceGroupDeviceCreateInfo, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceGroupPresentInfoKHR, PresentInfoKHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceGroupRenderPassBeginInfo, RenderPassBeginInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceGroupSubmitInfo, SubmitInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceGroupSwapchainCreateInfoKHR, SwapchainCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceMemoryOverallocationCreateInfoAMD, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DevicePrivateDataCreateInfoEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DeviceQueueGlobalPriorityCreateInfoEXT, DeviceQueueCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DisplayNativeHdrSurfaceCapabilitiesAMD, SurfaceCapabilities2KHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DisplayPresentInfoKHR, PresentInfoKHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<DrmFormatModifierPropertiesListEXT, FormatProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ExportFenceCreateInfo, FenceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<ExportFenceWin32HandleInfoKHR, FenceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  template <>
  struct StructExtends<ExportMemoryAllocateInfo, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ExportMemoryAllocateInfoNV, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<ExportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<ExportMemoryWin32HandleInfoNV, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  template <>
  struct StructExtends<ExportSemaphoreCreateInfo, SemaphoreCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<ExportSemaphoreWin32HandleInfoKHR, SemaphoreCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  template <>
  struct StructExtends<ExternalFormatANDROID, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ExternalFormatANDROID, SamplerYcbcrConversionCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  template <>
  struct StructExtends<ExternalImageFormatProperties, ImageFormatProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ExternalMemoryBufferCreateInfo, BufferCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ExternalMemoryImageCreateInfo, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ExternalMemoryImageCreateInfoNV, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<FilterCubicImageViewImageFormatPropertiesEXT, ImageFormatProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<FragmentShadingRateAttachmentInfoKHR, SubpassDescription2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<FramebufferAttachmentsCreateInfo, FramebufferCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<GraphicsPipelineShaderGroupsCreateInfoNV, GraphicsPipelineCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageDrmFormatModifierExplicitCreateInfoEXT, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageDrmFormatModifierListCreateInfoEXT, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageFormatListCreateInfo, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageFormatListCreateInfo, SwapchainCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageFormatListCreateInfo, PhysicalDeviceImageFormatInfo2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImagePlaneMemoryRequirementsInfo, ImageMemoryRequirementsInfo2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageStencilUsageCreateInfo, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageStencilUsageCreateInfo, PhysicalDeviceImageFormatInfo2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageSwapchainCreateInfoKHR, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageViewASTCDecodeModeEXT, ImageViewCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImageViewUsageCreateInfo, ImageViewCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  template <>
  struct StructExtends<ImportAndroidHardwareBufferInfoANDROID, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  template <>
  struct StructExtends<ImportMemoryFdInfoKHR, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ImportMemoryHostPointerInfoEXT, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<ImportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
  template <>
  struct StructExtends<ImportMemoryZirconHandleInfoFUCHSIA, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_FUCHSIA*/
  template <>
  struct StructExtends<MemoryAllocateFlagsInfo, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<MemoryBarrier2KHR, SubpassDependency2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<MemoryDedicatedAllocateInfo, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<MemoryDedicatedRequirements, MemoryRequirements2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<MemoryOpaqueCaptureAddressAllocateInfo, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<MemoryPriorityAllocateInfoEXT, MemoryAllocateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorSetLayoutCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorPoolCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo2KHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevice16BitStorageFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevice16BitStorageFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevice8BitStorageFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevice8BitStorageFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceAccelerationStructureFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceAccelerationStructureFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceAccelerationStructurePropertiesKHR, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceBlendOperationAdvancedPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceColorWriteEnableFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceColorWriteEnableFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceConservativeRasterizationPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCooperativeMatrixPropertiesNV, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceCustomBorderColorPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDepthStencilResolveProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDescriptorIndexingProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDiscardRectanglePropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceDriverProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceExtendedDynamicState2FeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceExtendedDynamicState2FeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceExternalImageFormatInfo, PhysicalDeviceImageFormatInfo2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceExternalMemoryHostPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFeatures2, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFloatControlsProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentDensityMap2PropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentDensityMapPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceFragmentShadingRatePropertiesKHR, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceGlobalPriorityQueryFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceHostQueryResetFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceHostQueryResetFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceIDProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceImageDrmFormatModifierInfoEXT, PhysicalDeviceImageFormatInfo2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceImageRobustnessFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceImageRobustnessFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceImageViewImageFormatInfoEXT, PhysicalDeviceImageFormatInfo2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceInheritedViewportScissorFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceInheritedViewportScissorFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceInlineUniformBlockFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceInlineUniformBlockFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceInlineUniformBlockPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceLineRasterizationPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMaintenance3Properties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMemoryBudgetPropertiesEXT, PhysicalDeviceMemoryProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMeshShaderPropertiesNV, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMultiviewFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMultiviewFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMultiviewProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePCIBusInfoPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePerformanceQueryPropertiesKHR, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePointClippingProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<PhysicalDevicePortabilitySubsetPropertiesKHR, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
  template <>
  struct StructExtends<PhysicalDevicePrivateDataFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePrivateDataFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceProtectedMemoryProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceProvokingVertexFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceProvokingVertexFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceProvokingVertexPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDevicePushDescriptorPropertiesKHR, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRayQueryFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRayQueryFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRayTracingPipelineFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRayTracingPipelineFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRayTracingPipelinePropertiesKHR, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRayTracingPropertiesNV, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceRobustness2PropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSampleLocationsPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSamplerFilterMinmaxProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderCoreProperties2AMD, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderCorePropertiesAMD, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderSMBuiltinsPropertiesNV, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceShadingRateImagePropertiesNV, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSubgroupProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSubgroupSizeControlPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSynchronization2FeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceSynchronization2FeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTexelBufferAlignmentPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTimelineSemaphoreProperties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceTransformFeedbackPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVariablePointersFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVariablePointersFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVertexAttributeDivisorPropertiesEXT, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVulkan11Features, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVulkan11Features, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVulkan11Properties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVulkan12Features, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVulkan12Features, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVulkan12Properties, PhysicalDeviceProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR, PhysicalDeviceFeatures2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR, DeviceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineColorBlendAdvancedStateCreateInfoEXT, PipelineColorBlendStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineColorWriteCreateInfoEXT, PipelineColorBlendStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineCompilerControlCreateInfoAMD, GraphicsPipelineCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineCompilerControlCreateInfoAMD, ComputePipelineCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineCoverageModulationStateCreateInfoNV, PipelineMultisampleStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineCoverageReductionStateCreateInfoNV, PipelineMultisampleStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineCoverageToColorStateCreateInfoNV, PipelineMultisampleStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, GraphicsPipelineCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, ComputePipelineCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, RayTracingPipelineCreateInfoNV>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, RayTracingPipelineCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineDiscardRectangleStateCreateInfoEXT, GraphicsPipelineCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineFragmentShadingRateEnumStateCreateInfoNV, GraphicsPipelineCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineFragmentShadingRateStateCreateInfoKHR, GraphicsPipelineCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineRasterizationConservativeStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineRasterizationDepthClipStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineRasterizationLineStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineRasterizationProvokingVertexStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineRasterizationStateRasterizationOrderAMD, PipelineRasterizationStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineRasterizationStateStreamCreateInfoEXT, PipelineRasterizationStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineRepresentativeFragmentTestStateCreateInfoNV, GraphicsPipelineCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineSampleLocationsStateCreateInfoEXT, PipelineMultisampleStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, PipelineShaderStageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineTessellationDomainOriginStateCreateInfo, PipelineTessellationStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineVertexInputDivisorStateCreateInfoEXT, PipelineVertexInputStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineViewportCoarseSampleOrderStateCreateInfoNV, PipelineViewportStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineViewportExclusiveScissorStateCreateInfoNV, PipelineViewportStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineViewportShadingRateImageStateCreateInfoNV, PipelineViewportStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineViewportSwizzleStateCreateInfoNV, PipelineViewportStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PipelineViewportWScalingStateCreateInfoNV, PipelineViewportStateCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_USE_PLATFORM_GGP )
  template <>
  struct StructExtends<PresentFrameTokenGGP, PresentInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_GGP*/
  template <>
  struct StructExtends<PresentRegionsKHR, PresentInfoKHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<PresentTimesInfoGOOGLE, PresentInfoKHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ProtectedSubmitInfo, SubmitInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<QueryPoolPerformanceCreateInfoKHR, QueryPoolCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<QueryPoolPerformanceQueryCreateInfoINTEL, QueryPoolCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<QueueFamilyCheckpointProperties2NV, QueueFamilyProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<QueueFamilyCheckpointPropertiesNV, QueueFamilyProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<QueueFamilyGlobalPriorityPropertiesEXT, QueueFamilyProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<RenderPassAttachmentBeginInfo, RenderPassBeginInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<RenderPassInputAttachmentAspectCreateInfo, RenderPassCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<RenderPassMultiviewCreateInfo, RenderPassCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<RenderPassSampleLocationsBeginInfoEXT, RenderPassBeginInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<RenderPassTransformBeginInfoQCOM, RenderPassBeginInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier2KHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SamplerCustomBorderColorCreateInfoEXT, SamplerCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SamplerReductionModeCreateInfo, SamplerCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SamplerYcbcrConversionImageFormatProperties, ImageFormatProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SamplerYcbcrConversionInfo, SamplerCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SamplerYcbcrConversionInfo, ImageViewCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SemaphoreTypeCreateInfo, SemaphoreCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SemaphoreTypeCreateInfo, PhysicalDeviceExternalSemaphoreInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ShaderModuleValidationCacheCreateInfoEXT, ShaderModuleCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SharedPresentSurfaceCapabilitiesKHR, SurfaceCapabilities2KHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SubpassDescriptionDepthStencilResolve, SubpassDescription2>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<SurfaceCapabilitiesFullScreenExclusiveEXT, SurfaceCapabilities2KHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, PhysicalDeviceSurfaceInfo2KHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, SwapchainCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, PhysicalDeviceSurfaceInfo2KHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, SwapchainCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  template <>
  struct StructExtends<SurfaceProtectedCapabilitiesKHR, SurfaceCapabilities2KHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SwapchainCounterCreateInfoEXT, SwapchainCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<SwapchainDisplayNativeHdrCreateInfoAMD, SwapchainCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<TextureLODGatherFormatPropertiesAMD, ImageFormatProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<TimelineSemaphoreSubmitInfo, SubmitInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<TimelineSemaphoreSubmitInfo, BindSparseInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ValidationFeaturesEXT, InstanceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH264CapabilitiesEXT, VideoCapabilitiesKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH264DpbSlotInfoEXT, VideoReferenceSlotKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH264MvcEXT, VideoDecodeH264PictureInfoEXT>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH264PictureInfoEXT, VideoDecodeInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH264ProfileEXT, VideoProfileKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH264SessionCreateInfoEXT, VideoSessionCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH264SessionParametersAddInfoEXT, VideoSessionParametersUpdateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH264SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH265CapabilitiesEXT, VideoCapabilitiesKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH265DpbSlotInfoEXT, VideoReferenceSlotKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH265PictureInfoEXT, VideoDecodeInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH265ProfileEXT, VideoProfileKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH265SessionCreateInfoEXT, VideoSessionCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH265SessionParametersAddInfoEXT, VideoSessionParametersUpdateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoDecodeH265SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoEncodeH264CapabilitiesEXT, VideoCapabilitiesKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoEncodeH264EmitPictureParametersEXT, VideoEncodeInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoEncodeH264ProfileEXT, VideoProfileKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoEncodeH264SessionCreateInfoEXT, VideoSessionCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoEncodeH264SessionParametersAddInfoEXT, VideoSessionParametersUpdateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoEncodeH264SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoEncodeH264VclFrameInfoEXT, VideoEncodeInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoEncodeRateControlInfoKHR, VideoCodingControlInfoKHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoProfileKHR, QueryPoolCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<VideoProfileKHR, FormatProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<VideoProfileKHR, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<VideoProfileKHR, ImageViewCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<VideoProfileKHR, BufferCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoProfilesKHR, FormatProperties2>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<VideoProfilesKHR, ImageCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<VideoProfilesKHR, ImageViewCreateInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<VideoProfilesKHR, BufferCreateInfo>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct StructExtends<VideoQueueFamilyProperties2KHR, QueueFamilyProperties2>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo2KHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  template <>
  struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo2KHR>
  {
    enum
    {
      value = true
    };
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  template <>
  struct StructExtends<WriteDescriptorSetAccelerationStructureKHR, WriteDescriptorSet>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<WriteDescriptorSetAccelerationStructureNV, WriteDescriptorSet>
  {
    enum
    {
      value = true
    };
  };
  template <>
  struct StructExtends<WriteDescriptorSetInlineUniformBlockEXT, WriteDescriptorSet>
  {
    enum
    {
      value = true
    };
  };
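
  // The StructExtends specializations above are consumed by StructureChain (defined earlier in this header) to
  // check at compile time that each structure appended to a pNext chain is actually allowed to extend its base
  // structure. A minimal sketch, using the SampleLocationsInfoEXT / ImageMemoryBarrier pairing declared above:
  //
  //   VULKAN_HPP_NAMESPACE::StructureChain<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier,
  //                                        VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>
  //     chain;
  //   auto & barrier = chain.get<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>();
  //   // barrier.pNext already points at the SampleLocationsInfoEXT element of the chain.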

#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
  class DynamicLoader
  {
  public:
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT
#  else
    DynamicLoader( std::string const & vulkanLibraryName = {} )
#  endif
    {
      if ( !vulkanLibraryName.empty() )
      {
#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
        m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
#  elif defined( _WIN32 )
        m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
#  else
#    error unsupported platform
#  endif
      }
      else
      {
#  if defined( __unix__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
        m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
        if ( m_library == nullptr )
        {
          m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
        }
#  elif defined( __APPLE__ )
        m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
#  elif defined( _WIN32 )
        m_library = ::LoadLibraryA( "vulkan-1.dll" );
#  else
#    error unsupported platform
#  endif
      }

#  ifndef VULKAN_HPP_NO_EXCEPTIONS
      if ( m_library == nullptr )
      {
        // NOTE: an InitializationFailedError would be more appropriate here, but MSVC insists that the symbol
        // does not exist within the scope of this function.
        throw std::runtime_error( "Failed to load vulkan library!" );
      }
#  endif
    }

    DynamicLoader( DynamicLoader const & ) = delete;

    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library )
    {
      other.m_library = nullptr;
    }

    DynamicLoader & operator=( DynamicLoader const & ) = delete;

    DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
    {
      std::swap( m_library, other.m_library );
      return *this;
    }

    ~DynamicLoader() VULKAN_HPP_NOEXCEPT
    {
      if ( m_library )
      {
#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
        dlclose( m_library );
#  elif defined( _WIN32 )
        ::FreeLibrary( m_library );
#  else
#    error unsupported platform
#  endif
      }
    }

    template <typename T>
    T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
      return (T)dlsym( m_library, function );
#  elif defined( _WIN32 )
      return ( T )::GetProcAddress( m_library, function );
#  else
#    error unsupported platform
#  endif
    }

    bool success() const VULKAN_HPP_NOEXCEPT
    {
      return m_library != nullptr;
    }

  private:
#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ )
    void * m_library;
#  elif defined( _WIN32 )
    ::HINSTANCE m_library;
#  else
#    error unsupported platform
#  endif
  };
#endif
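
  // A minimal usage sketch (not part of the generated API): DynamicLoader loads the Vulkan library at runtime
  // and fetches vkGetInstanceProcAddr, which can then bootstrap a DispatchLoaderDynamic (declared below).
  // Whether a load failure throws or must be checked via success() depends on VULKAN_HPP_NO_EXCEPTIONS, as
  // shown in the constructor above.
  //
  //   VULKAN_HPP_NAMESPACE::DynamicLoader dl;
  //   if ( dl.success() )
  //   {
  //     auto vkGetInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
  //     VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic dispatcher;
  //     dispatcher.init( vkGetInstanceProcAddr );  // resolves the global entry points; instance/device
  //                                                // level initialization follows once those are created
  //   }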

  class DispatchLoaderDynamic
  {
  public:
    using PFN_dummy = void ( * )();
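    // Function pointers for commands guarded by a platform define that is not enabled are replaced by
    // PFN_dummy placeholders (named placeholder_dont_call_*), so that the member layout of this struct does
    // not depend on which platform defines are set.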

#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
#else
    PFN_dummy placeholder_dont_call_vkAcquireFullScreenExclusiveModeEXT = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    PFN_vkAcquireNextImage2KHR                 vkAcquireNextImage2KHR                 = 0;
    PFN_vkAcquireNextImageKHR                  vkAcquireNextImageKHR                  = 0;
    PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
    PFN_vkAcquireProfilingLockKHR              vkAcquireProfilingLockKHR              = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
#else
    PFN_dummy placeholder_dont_call_vkAcquireWinrtDisplayNV = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
    PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
#else
    PFN_dummy placeholder_dont_call_vkAcquireXlibDisplayEXT = 0;
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
    PFN_vkAllocateCommandBuffers            vkAllocateCommandBuffers            = 0;
    PFN_vkAllocateDescriptorSets            vkAllocateDescriptorSets            = 0;
    PFN_vkAllocateMemory                    vkAllocateMemory                    = 0;
    PFN_vkBeginCommandBuffer                vkBeginCommandBuffer                = 0;
    PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
    PFN_vkBindBufferMemory                  vkBindBufferMemory                  = 0;
    PFN_vkBindBufferMemory2                 vkBindBufferMemory2                 = 0;
    PFN_vkBindBufferMemory2KHR              vkBindBufferMemory2KHR              = 0;
    PFN_vkBindImageMemory                   vkBindImageMemory                   = 0;
    PFN_vkBindImageMemory2                  vkBindImageMemory2                  = 0;
    PFN_vkBindImageMemory2KHR               vkBindImageMemory2KHR               = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkBindVideoSessionMemoryKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    PFN_vkBuildAccelerationStructuresKHR  vkBuildAccelerationStructuresKHR  = 0;
    PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
    PFN_vkCmdBeginDebugUtilsLabelEXT      vkCmdBeginDebugUtilsLabelEXT      = 0;
    PFN_vkCmdBeginQuery                   vkCmdBeginQuery                   = 0;
    PFN_vkCmdBeginQueryIndexedEXT         vkCmdBeginQueryIndexedEXT         = 0;
    PFN_vkCmdBeginRenderPass              vkCmdBeginRenderPass              = 0;
    PFN_vkCmdBeginRenderPass2             vkCmdBeginRenderPass2             = 0;
    PFN_vkCmdBeginRenderPass2KHR          vkCmdBeginRenderPass2KHR          = 0;
    PFN_vkCmdBeginTransformFeedbackEXT    vkCmdBeginTransformFeedbackEXT    = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCmdBeginVideoCodingKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    PFN_vkCmdBindDescriptorSets                     vkCmdBindDescriptorSets                     = 0;
    PFN_vkCmdBindIndexBuffer                        vkCmdBindIndexBuffer                        = 0;
    PFN_vkCmdBindPipeline                           vkCmdBindPipeline                           = 0;
    PFN_vkCmdBindPipelineShaderGroupNV              vkCmdBindPipelineShaderGroupNV              = 0;
    PFN_vkCmdBindShadingRateImageNV                 vkCmdBindShadingRateImageNV                 = 0;
    PFN_vkCmdBindTransformFeedbackBuffersEXT        vkCmdBindTransformFeedbackBuffersEXT        = 0;
    PFN_vkCmdBindVertexBuffers                      vkCmdBindVertexBuffers                      = 0;
    PFN_vkCmdBindVertexBuffers2EXT                  vkCmdBindVertexBuffers2EXT                  = 0;
    PFN_vkCmdBlitImage                              vkCmdBlitImage                              = 0;
    PFN_vkCmdBlitImage2KHR                          vkCmdBlitImage2KHR                          = 0;
    PFN_vkCmdBuildAccelerationStructureNV           vkCmdBuildAccelerationStructureNV           = 0;
    PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0;
    PFN_vkCmdBuildAccelerationStructuresKHR         vkCmdBuildAccelerationStructuresKHR         = 0;
    PFN_vkCmdClearAttachments                       vkCmdClearAttachments                       = 0;
    PFN_vkCmdClearColorImage                        vkCmdClearColorImage                        = 0;
    PFN_vkCmdClearDepthStencilImage                 vkCmdClearDepthStencilImage                 = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCmdControlVideoCodingKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    PFN_vkCmdCopyAccelerationStructureKHR         vkCmdCopyAccelerationStructureKHR         = 0;
    PFN_vkCmdCopyAccelerationStructureNV          vkCmdCopyAccelerationStructureNV          = 0;
    PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
    PFN_vkCmdCopyBuffer                           vkCmdCopyBuffer                           = 0;
    PFN_vkCmdCopyBuffer2KHR                       vkCmdCopyBuffer2KHR                       = 0;
    PFN_vkCmdCopyBufferToImage                    vkCmdCopyBufferToImage                    = 0;
    PFN_vkCmdCopyBufferToImage2KHR                vkCmdCopyBufferToImage2KHR                = 0;
    PFN_vkCmdCopyImage                            vkCmdCopyImage                            = 0;
    PFN_vkCmdCopyImage2KHR                        vkCmdCopyImage2KHR                        = 0;
    PFN_vkCmdCopyImageToBuffer                    vkCmdCopyImageToBuffer                    = 0;
    PFN_vkCmdCopyImageToBuffer2KHR                vkCmdCopyImageToBuffer2KHR                = 0;
    PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
    PFN_vkCmdCopyQueryPoolResults                 vkCmdCopyQueryPoolResults                 = 0;
    PFN_vkCmdCuLaunchKernelNVX                    vkCmdCuLaunchKernelNVX                    = 0;
    PFN_vkCmdDebugMarkerBeginEXT                  vkCmdDebugMarkerBeginEXT                  = 0;
    PFN_vkCmdDebugMarkerEndEXT                    vkCmdDebugMarkerEndEXT                    = 0;
    PFN_vkCmdDebugMarkerInsertEXT                 vkCmdDebugMarkerInsertEXT                 = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCmdDecodeVideoKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    PFN_vkCmdDispatch                     vkCmdDispatch                     = 0;
    PFN_vkCmdDispatchBase                 vkCmdDispatchBase                 = 0;
    PFN_vkCmdDispatchBaseKHR              vkCmdDispatchBaseKHR              = 0;
    PFN_vkCmdDispatchIndirect             vkCmdDispatchIndirect             = 0;
    PFN_vkCmdDraw                         vkCmdDraw                         = 0;
    PFN_vkCmdDrawIndexed                  vkCmdDrawIndexed                  = 0;
    PFN_vkCmdDrawIndexedIndirect          vkCmdDrawIndexedIndirect          = 0;
    PFN_vkCmdDrawIndexedIndirectCount     vkCmdDrawIndexedIndirectCount     = 0;
    PFN_vkCmdDrawIndexedIndirectCountAMD  vkCmdDrawIndexedIndirectCountAMD  = 0;
    PFN_vkCmdDrawIndexedIndirectCountKHR  vkCmdDrawIndexedIndirectCountKHR  = 0;
    PFN_vkCmdDrawIndirect                 vkCmdDrawIndirect                 = 0;
    PFN_vkCmdDrawIndirectByteCountEXT     vkCmdDrawIndirectByteCountEXT     = 0;
    PFN_vkCmdDrawIndirectCount            vkCmdDrawIndirectCount            = 0;
    PFN_vkCmdDrawIndirectCountAMD         vkCmdDrawIndirectCountAMD         = 0;
    PFN_vkCmdDrawIndirectCountKHR         vkCmdDrawIndirectCountKHR         = 0;
    PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
    PFN_vkCmdDrawMeshTasksIndirectNV      vkCmdDrawMeshTasksIndirectNV      = 0;
    PFN_vkCmdDrawMeshTasksNV              vkCmdDrawMeshTasksNV              = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCmdEncodeVideoKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
    PFN_vkCmdEndDebugUtilsLabelEXT      vkCmdEndDebugUtilsLabelEXT      = 0;
    PFN_vkCmdEndQuery                   vkCmdEndQuery                   = 0;
    PFN_vkCmdEndQueryIndexedEXT         vkCmdEndQueryIndexedEXT         = 0;
    PFN_vkCmdEndRenderPass              vkCmdEndRenderPass              = 0;
    PFN_vkCmdEndRenderPass2             vkCmdEndRenderPass2             = 0;
    PFN_vkCmdEndRenderPass2KHR          vkCmdEndRenderPass2KHR          = 0;
    PFN_vkCmdEndTransformFeedbackEXT    vkCmdEndTransformFeedbackEXT    = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCmdEndVideoCodingKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    PFN_vkCmdExecuteCommands                          vkCmdExecuteCommands                          = 0;
    PFN_vkCmdExecuteGeneratedCommandsNV               vkCmdExecuteGeneratedCommandsNV               = 0;
    PFN_vkCmdFillBuffer                               vkCmdFillBuffer                               = 0;
    PFN_vkCmdInsertDebugUtilsLabelEXT                 vkCmdInsertDebugUtilsLabelEXT                 = 0;
    PFN_vkCmdNextSubpass                              vkCmdNextSubpass                              = 0;
    PFN_vkCmdNextSubpass2                             vkCmdNextSubpass2                             = 0;
    PFN_vkCmdNextSubpass2KHR                          vkCmdNextSubpass2KHR                          = 0;
    PFN_vkCmdPipelineBarrier                          vkCmdPipelineBarrier                          = 0;
    PFN_vkCmdPipelineBarrier2KHR                      vkCmdPipelineBarrier2KHR                      = 0;
    PFN_vkCmdPreprocessGeneratedCommandsNV            vkCmdPreprocessGeneratedCommandsNV            = 0;
    PFN_vkCmdPushConstants                            vkCmdPushConstants                            = 0;
    PFN_vkCmdPushDescriptorSetKHR                     vkCmdPushDescriptorSetKHR                     = 0;
    PFN_vkCmdPushDescriptorSetWithTemplateKHR         vkCmdPushDescriptorSetWithTemplateKHR         = 0;
    PFN_vkCmdResetEvent                               vkCmdResetEvent                               = 0;
    PFN_vkCmdResetEvent2KHR                           vkCmdResetEvent2KHR                           = 0;
    PFN_vkCmdResetQueryPool                           vkCmdResetQueryPool                           = 0;
    PFN_vkCmdResolveImage                             vkCmdResolveImage                             = 0;
    PFN_vkCmdResolveImage2KHR                         vkCmdResolveImage2KHR                         = 0;
    PFN_vkCmdSetBlendConstants                        vkCmdSetBlendConstants                        = 0;
    PFN_vkCmdSetCheckpointNV                          vkCmdSetCheckpointNV                          = 0;
    PFN_vkCmdSetCoarseSampleOrderNV                   vkCmdSetCoarseSampleOrderNV                   = 0;
    PFN_vkCmdSetColorWriteEnableEXT                   vkCmdSetColorWriteEnableEXT                   = 0;
    PFN_vkCmdSetCullModeEXT                           vkCmdSetCullModeEXT                           = 0;
    PFN_vkCmdSetDepthBias                             vkCmdSetDepthBias                             = 0;
    PFN_vkCmdSetDepthBiasEnableEXT                    vkCmdSetDepthBiasEnableEXT                    = 0;
    PFN_vkCmdSetDepthBounds                           vkCmdSetDepthBounds                           = 0;
    PFN_vkCmdSetDepthBoundsTestEnableEXT              vkCmdSetDepthBoundsTestEnableEXT              = 0;
    PFN_vkCmdSetDepthCompareOpEXT                     vkCmdSetDepthCompareOpEXT                     = 0;
    PFN_vkCmdSetDepthTestEnableEXT                    vkCmdSetDepthTestEnableEXT                    = 0;
    PFN_vkCmdSetDepthWriteEnableEXT                   vkCmdSetDepthWriteEnableEXT                   = 0;
    PFN_vkCmdSetDeviceMask                            vkCmdSetDeviceMask                            = 0;
    PFN_vkCmdSetDeviceMaskKHR                         vkCmdSetDeviceMaskKHR                         = 0;
    PFN_vkCmdSetDiscardRectangleEXT                   vkCmdSetDiscardRectangleEXT                   = 0;
    PFN_vkCmdSetEvent                                 vkCmdSetEvent                                 = 0;
    PFN_vkCmdSetEvent2KHR                             vkCmdSetEvent2KHR                             = 0;
    PFN_vkCmdSetExclusiveScissorNV                    vkCmdSetExclusiveScissorNV                    = 0;
    PFN_vkCmdSetFragmentShadingRateEnumNV             vkCmdSetFragmentShadingRateEnumNV             = 0;
    PFN_vkCmdSetFragmentShadingRateKHR                vkCmdSetFragmentShadingRateKHR                = 0;
    PFN_vkCmdSetFrontFaceEXT                          vkCmdSetFrontFaceEXT                          = 0;
    PFN_vkCmdSetLineStippleEXT                        vkCmdSetLineStippleEXT                        = 0;
    PFN_vkCmdSetLineWidth                             vkCmdSetLineWidth                             = 0;
    PFN_vkCmdSetLogicOpEXT                            vkCmdSetLogicOpEXT                            = 0;
    PFN_vkCmdSetPatchControlPointsEXT                 vkCmdSetPatchControlPointsEXT                 = 0;
    PFN_vkCmdSetPerformanceMarkerINTEL                vkCmdSetPerformanceMarkerINTEL                = 0;
    PFN_vkCmdSetPerformanceOverrideINTEL              vkCmdSetPerformanceOverrideINTEL              = 0;
    PFN_vkCmdSetPerformanceStreamMarkerINTEL          vkCmdSetPerformanceStreamMarkerINTEL          = 0;
    PFN_vkCmdSetPrimitiveRestartEnableEXT             vkCmdSetPrimitiveRestartEnableEXT             = 0;
    PFN_vkCmdSetPrimitiveTopologyEXT                  vkCmdSetPrimitiveTopologyEXT                  = 0;
    PFN_vkCmdSetRasterizerDiscardEnableEXT            vkCmdSetRasterizerDiscardEnableEXT            = 0;
    PFN_vkCmdSetRayTracingPipelineStackSizeKHR        vkCmdSetRayTracingPipelineStackSizeKHR        = 0;
    PFN_vkCmdSetSampleLocationsEXT                    vkCmdSetSampleLocationsEXT                    = 0;
    PFN_vkCmdSetScissor                               vkCmdSetScissor                               = 0;
    PFN_vkCmdSetScissorWithCountEXT                   vkCmdSetScissorWithCountEXT                   = 0;
    PFN_vkCmdSetStencilCompareMask                    vkCmdSetStencilCompareMask                    = 0;
    PFN_vkCmdSetStencilOpEXT                          vkCmdSetStencilOpEXT                          = 0;
    PFN_vkCmdSetStencilReference                      vkCmdSetStencilReference                      = 0;
    PFN_vkCmdSetStencilTestEnableEXT                  vkCmdSetStencilTestEnableEXT                  = 0;
    PFN_vkCmdSetStencilWriteMask                      vkCmdSetStencilWriteMask                      = 0;
    PFN_vkCmdSetVertexInputEXT                        vkCmdSetVertexInputEXT                        = 0;
    PFN_vkCmdSetViewport                              vkCmdSetViewport                              = 0;
    PFN_vkCmdSetViewportShadingRatePaletteNV          vkCmdSetViewportShadingRatePaletteNV          = 0;
    PFN_vkCmdSetViewportWScalingNV                    vkCmdSetViewportWScalingNV                    = 0;
    PFN_vkCmdSetViewportWithCountEXT                  vkCmdSetViewportWithCountEXT                  = 0;
    PFN_vkCmdTraceRaysIndirectKHR                     vkCmdTraceRaysIndirectKHR                     = 0;
    PFN_vkCmdTraceRaysKHR                             vkCmdTraceRaysKHR                             = 0;
    PFN_vkCmdTraceRaysNV                              vkCmdTraceRaysNV                              = 0;
    PFN_vkCmdUpdateBuffer                             vkCmdUpdateBuffer                             = 0;
    PFN_vkCmdWaitEvents                               vkCmdWaitEvents                               = 0;
    PFN_vkCmdWaitEvents2KHR                           vkCmdWaitEvents2KHR                           = 0;
    PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
    PFN_vkCmdWriteAccelerationStructuresPropertiesNV  vkCmdWriteAccelerationStructuresPropertiesNV  = 0;
    PFN_vkCmdWriteBufferMarker2AMD                    vkCmdWriteBufferMarker2AMD                    = 0;
    PFN_vkCmdWriteBufferMarkerAMD                     vkCmdWriteBufferMarkerAMD                     = 0;
    PFN_vkCmdWriteTimestamp                           vkCmdWriteTimestamp                           = 0;
    PFN_vkCmdWriteTimestamp2KHR                       vkCmdWriteTimestamp2KHR                       = 0;
    PFN_vkCompileDeferredNV                           vkCompileDeferredNV                           = 0;
    PFN_vkCopyAccelerationStructureKHR                vkCopyAccelerationStructureKHR                = 0;
    PFN_vkCopyAccelerationStructureToMemoryKHR        vkCopyAccelerationStructureToMemoryKHR        = 0;
    PFN_vkCopyMemoryToAccelerationStructureKHR        vkCopyMemoryToAccelerationStructureKHR        = 0;
    PFN_vkCreateAccelerationStructureKHR              vkCreateAccelerationStructureKHR              = 0;
    PFN_vkCreateAccelerationStructureNV               vkCreateAccelerationStructureNV               = 0;
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateAndroidSurfaceKHR = 0;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
    PFN_vkCreateBuffer                      vkCreateBuffer                      = 0;
    PFN_vkCreateBufferView                  vkCreateBufferView                  = 0;
    PFN_vkCreateCommandPool                 vkCreateCommandPool                 = 0;
    PFN_vkCreateComputePipelines            vkCreateComputePipelines            = 0;
    PFN_vkCreateCuFunctionNVX               vkCreateCuFunctionNVX               = 0;
    PFN_vkCreateCuModuleNVX                 vkCreateCuModuleNVX                 = 0;
    PFN_vkCreateDebugReportCallbackEXT      vkCreateDebugReportCallbackEXT      = 0;
    PFN_vkCreateDebugUtilsMessengerEXT      vkCreateDebugUtilsMessengerEXT      = 0;
    PFN_vkCreateDeferredOperationKHR        vkCreateDeferredOperationKHR        = 0;
    PFN_vkCreateDescriptorPool              vkCreateDescriptorPool              = 0;
    PFN_vkCreateDescriptorSetLayout         vkCreateDescriptorSetLayout         = 0;
    PFN_vkCreateDescriptorUpdateTemplate    vkCreateDescriptorUpdateTemplate    = 0;
    PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
    PFN_vkCreateDevice                      vkCreateDevice                      = 0;
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
    PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateDirectFBSurfaceEXT = 0;
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
    PFN_vkCreateDisplayModeKHR         vkCreateDisplayModeKHR         = 0;
    PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
    PFN_vkCreateEvent                  vkCreateEvent                  = 0;
    PFN_vkCreateFence                  vkCreateFence                  = 0;
    PFN_vkCreateFramebuffer            vkCreateFramebuffer            = 0;
    PFN_vkCreateGraphicsPipelines      vkCreateGraphicsPipelines      = 0;
    PFN_vkCreateHeadlessSurfaceEXT     vkCreateHeadlessSurfaceEXT     = 0;
#if defined( VK_USE_PLATFORM_IOS_MVK )
    PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateIOSSurfaceMVK = 0;
#endif /*VK_USE_PLATFORM_IOS_MVK*/
    PFN_vkCreateImage vkCreateImage = 0;
#if defined( VK_USE_PLATFORM_FUCHSIA )
    PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateImagePipeSurfaceFUCHSIA = 0;
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    PFN_vkCreateImageView                vkCreateImageView                = 0;
    PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
    PFN_vkCreateInstance                 vkCreateInstance                 = 0;
#if defined( VK_USE_PLATFORM_MACOS_MVK )
    PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateMacOSSurfaceMVK = 0;
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
    PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateMetalSurfaceEXT = 0;
#endif /*VK_USE_PLATFORM_METAL_EXT*/
    PFN_vkCreatePipelineCache             vkCreatePipelineCache             = 0;
    PFN_vkCreatePipelineLayout            vkCreatePipelineLayout            = 0;
    PFN_vkCreatePrivateDataSlotEXT        vkCreatePrivateDataSlotEXT        = 0;
    PFN_vkCreateQueryPool                 vkCreateQueryPool                 = 0;
    PFN_vkCreateRayTracingPipelinesKHR    vkCreateRayTracingPipelinesKHR    = 0;
    PFN_vkCreateRayTracingPipelinesNV     vkCreateRayTracingPipelinesNV     = 0;
    PFN_vkCreateRenderPass                vkCreateRenderPass                = 0;
    PFN_vkCreateRenderPass2               vkCreateRenderPass2               = 0;
    PFN_vkCreateRenderPass2KHR            vkCreateRenderPass2KHR            = 0;
    PFN_vkCreateSampler                   vkCreateSampler                   = 0;
    PFN_vkCreateSamplerYcbcrConversion    vkCreateSamplerYcbcrConversion    = 0;
    PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
    PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateScreenSurfaceQNX = 0;
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
    PFN_vkCreateSemaphore           vkCreateSemaphore           = 0;
    PFN_vkCreateShaderModule        vkCreateShaderModule        = 0;
    PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
#if defined( VK_USE_PLATFORM_GGP )
    PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateStreamDescriptorSurfaceGGP = 0;
#endif /*VK_USE_PLATFORM_GGP*/
    PFN_vkCreateSwapchainKHR       vkCreateSwapchainKHR       = 0;
    PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
#if defined( VK_USE_PLATFORM_VI_NN )
    PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateViSurfaceNN = 0;
#endif /*VK_USE_PLATFORM_VI_NN*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateVideoSessionKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateVideoSessionParametersKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
    PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateWaylandSurfaceKHR = 0;
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateWin32SurfaceKHR = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
    PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateXcbSurfaceKHR = 0;
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_XLIB_KHR )
    PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkCreateXlibSurfaceKHR = 0;
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
    PFN_vkDebugMarkerSetObjectNameEXT        vkDebugMarkerSetObjectNameEXT        = 0;
    PFN_vkDebugMarkerSetObjectTagEXT         vkDebugMarkerSetObjectTagEXT         = 0;
    PFN_vkDebugReportMessageEXT              vkDebugReportMessageEXT              = 0;
    PFN_vkDeferredOperationJoinKHR           vkDeferredOperationJoinKHR           = 0;
    PFN_vkDestroyAccelerationStructureKHR    vkDestroyAccelerationStructureKHR    = 0;
    PFN_vkDestroyAccelerationStructureNV     vkDestroyAccelerationStructureNV     = 0;
    PFN_vkDestroyBuffer                      vkDestroyBuffer                      = 0;
    PFN_vkDestroyBufferView                  vkDestroyBufferView                  = 0;
    PFN_vkDestroyCommandPool                 vkDestroyCommandPool                 = 0;
    PFN_vkDestroyCuFunctionNVX               vkDestroyCuFunctionNVX               = 0;
    PFN_vkDestroyCuModuleNVX                 vkDestroyCuModuleNVX                 = 0;
    PFN_vkDestroyDebugReportCallbackEXT      vkDestroyDebugReportCallbackEXT      = 0;
    PFN_vkDestroyDebugUtilsMessengerEXT      vkDestroyDebugUtilsMessengerEXT      = 0;
    PFN_vkDestroyDeferredOperationKHR        vkDestroyDeferredOperationKHR        = 0;
    PFN_vkDestroyDescriptorPool              vkDestroyDescriptorPool              = 0;
    PFN_vkDestroyDescriptorSetLayout         vkDestroyDescriptorSetLayout         = 0;
    PFN_vkDestroyDescriptorUpdateTemplate    vkDestroyDescriptorUpdateTemplate    = 0;
    PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
    PFN_vkDestroyDevice                      vkDestroyDevice                      = 0;
    PFN_vkDestroyEvent                       vkDestroyEvent                       = 0;
    PFN_vkDestroyFence                       vkDestroyFence                       = 0;
    PFN_vkDestroyFramebuffer                 vkDestroyFramebuffer                 = 0;
    PFN_vkDestroyImage                       vkDestroyImage                       = 0;
    PFN_vkDestroyImageView                   vkDestroyImageView                   = 0;
    PFN_vkDestroyIndirectCommandsLayoutNV    vkDestroyIndirectCommandsLayoutNV    = 0;
    PFN_vkDestroyInstance                    vkDestroyInstance                    = 0;
    PFN_vkDestroyPipeline                    vkDestroyPipeline                    = 0;
    PFN_vkDestroyPipelineCache               vkDestroyPipelineCache               = 0;
    PFN_vkDestroyPipelineLayout              vkDestroyPipelineLayout              = 0;
    PFN_vkDestroyPrivateDataSlotEXT          vkDestroyPrivateDataSlotEXT          = 0;
    PFN_vkDestroyQueryPool                   vkDestroyQueryPool                   = 0;
    PFN_vkDestroyRenderPass                  vkDestroyRenderPass                  = 0;
    PFN_vkDestroySampler                     vkDestroySampler                     = 0;
    PFN_vkDestroySamplerYcbcrConversion      vkDestroySamplerYcbcrConversion      = 0;
    PFN_vkDestroySamplerYcbcrConversionKHR   vkDestroySamplerYcbcrConversionKHR   = 0;
    PFN_vkDestroySemaphore                   vkDestroySemaphore                   = 0;
    PFN_vkDestroyShaderModule                vkDestroyShaderModule                = 0;
    PFN_vkDestroySurfaceKHR                  vkDestroySurfaceKHR                  = 0;
    PFN_vkDestroySwapchainKHR                vkDestroySwapchainKHR                = 0;
    PFN_vkDestroyValidationCacheEXT          vkDestroyValidationCacheEXT          = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkDestroyVideoSessionKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkDestroyVideoSessionParametersKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    PFN_vkDeviceWaitIdle                       vkDeviceWaitIdle                       = 0;
    PFN_vkDisplayPowerControlEXT               vkDisplayPowerControlEXT               = 0;
    PFN_vkEndCommandBuffer                     vkEndCommandBuffer                     = 0;
    PFN_vkEnumerateDeviceExtensionProperties   vkEnumerateDeviceExtensionProperties   = 0;
    PFN_vkEnumerateDeviceLayerProperties       vkEnumerateDeviceLayerProperties       = 0;
    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
    PFN_vkEnumerateInstanceLayerProperties     vkEnumerateInstanceLayerProperties     = 0;
    PFN_vkEnumerateInstanceVersion             vkEnumerateInstanceVersion             = 0;
    PFN_vkEnumeratePhysicalDeviceGroups        vkEnumeratePhysicalDeviceGroups        = 0;
    PFN_vkEnumeratePhysicalDeviceGroupsKHR     vkEnumeratePhysicalDeviceGroupsKHR     = 0;
    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR
                                                       vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
    PFN_vkEnumeratePhysicalDevices                     vkEnumeratePhysicalDevices                     = 0;
    PFN_vkFlushMappedMemoryRanges                      vkFlushMappedMemoryRanges                      = 0;
    PFN_vkFreeCommandBuffers                           vkFreeCommandBuffers                           = 0;
    PFN_vkFreeDescriptorSets                           vkFreeDescriptorSets                           = 0;
    PFN_vkFreeMemory                                   vkFreeMemory                                   = 0;
    PFN_vkGetAccelerationStructureBuildSizesKHR        vkGetAccelerationStructureBuildSizesKHR        = 0;
    PFN_vkGetAccelerationStructureDeviceAddressKHR     vkGetAccelerationStructureDeviceAddressKHR     = 0;
    PFN_vkGetAccelerationStructureHandleNV             vkGetAccelerationStructureHandleNV             = 0;
    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetAndroidHardwareBufferPropertiesANDROID = 0;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
    PFN_vkGetBufferDeviceAddress                         vkGetBufferDeviceAddress                         = 0;
    PFN_vkGetBufferDeviceAddressEXT                      vkGetBufferDeviceAddressEXT                      = 0;
    PFN_vkGetBufferDeviceAddressKHR                      vkGetBufferDeviceAddressKHR                      = 0;
    PFN_vkGetBufferMemoryRequirements                    vkGetBufferMemoryRequirements                    = 0;
    PFN_vkGetBufferMemoryRequirements2                   vkGetBufferMemoryRequirements2                   = 0;
    PFN_vkGetBufferMemoryRequirements2KHR                vkGetBufferMemoryRequirements2KHR                = 0;
    PFN_vkGetBufferOpaqueCaptureAddress                  vkGetBufferOpaqueCaptureAddress                  = 0;
    PFN_vkGetBufferOpaqueCaptureAddressKHR               vkGetBufferOpaqueCaptureAddressKHR               = 0;
    PFN_vkGetCalibratedTimestampsEXT                     vkGetCalibratedTimestampsEXT                     = 0;
    PFN_vkGetDeferredOperationMaxConcurrencyKHR          vkGetDeferredOperationMaxConcurrencyKHR          = 0;
    PFN_vkGetDeferredOperationResultKHR                  vkGetDeferredOperationResultKHR                  = 0;
    PFN_vkGetDescriptorSetLayoutSupport                  vkGetDescriptorSetLayoutSupport                  = 0;
    PFN_vkGetDescriptorSetLayoutSupportKHR               vkGetDescriptorSetLayoutSupportKHR               = 0;
    PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0;
    PFN_vkGetDeviceGroupPeerMemoryFeatures               vkGetDeviceGroupPeerMemoryFeatures               = 0;
    PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR            vkGetDeviceGroupPeerMemoryFeaturesKHR            = 0;
    PFN_vkGetDeviceGroupPresentCapabilitiesKHR           vkGetDeviceGroupPresentCapabilitiesKHR           = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetDeviceGroupSurfacePresentModes2EXT = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    PFN_vkGetDeviceGroupSurfacePresentModesKHR   vkGetDeviceGroupSurfacePresentModesKHR   = 0;
    PFN_vkGetDeviceMemoryCommitment              vkGetDeviceMemoryCommitment              = 0;
    PFN_vkGetDeviceMemoryOpaqueCaptureAddress    vkGetDeviceMemoryOpaqueCaptureAddress    = 0;
    PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
    PFN_vkGetDeviceProcAddr                      vkGetDeviceProcAddr                      = 0;
    PFN_vkGetDeviceQueue                         vkGetDeviceQueue                         = 0;
    PFN_vkGetDeviceQueue2                        vkGetDeviceQueue2                        = 0;
    PFN_vkGetDisplayModeProperties2KHR           vkGetDisplayModeProperties2KHR           = 0;
    PFN_vkGetDisplayModePropertiesKHR            vkGetDisplayModePropertiesKHR            = 0;
    PFN_vkGetDisplayPlaneCapabilities2KHR        vkGetDisplayPlaneCapabilities2KHR        = 0;
    PFN_vkGetDisplayPlaneCapabilitiesKHR         vkGetDisplayPlaneCapabilitiesKHR         = 0;
    PFN_vkGetDisplayPlaneSupportedDisplaysKHR    vkGetDisplayPlaneSupportedDisplaysKHR    = 0;
    PFN_vkGetEventStatus                         vkGetEventStatus                         = 0;
    PFN_vkGetFenceFdKHR                          vkGetFenceFdKHR                          = 0;
    PFN_vkGetFenceStatus                         vkGetFenceStatus                         = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetFenceWin32HandleKHR = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
    PFN_vkGetImageDrmFormatModifierPropertiesEXT   vkGetImageDrmFormatModifierPropertiesEXT   = 0;
    PFN_vkGetImageMemoryRequirements               vkGetImageMemoryRequirements               = 0;
    PFN_vkGetImageMemoryRequirements2              vkGetImageMemoryRequirements2              = 0;
    PFN_vkGetImageMemoryRequirements2KHR           vkGetImageMemoryRequirements2KHR           = 0;
    PFN_vkGetImageSparseMemoryRequirements         vkGetImageSparseMemoryRequirements         = 0;
    PFN_vkGetImageSparseMemoryRequirements2        vkGetImageSparseMemoryRequirements2        = 0;
    PFN_vkGetImageSparseMemoryRequirements2KHR     vkGetImageSparseMemoryRequirements2KHR     = 0;
    PFN_vkGetImageSubresourceLayout                vkGetImageSubresourceLayout                = 0;
    PFN_vkGetImageViewAddressNVX                   vkGetImageViewAddressNVX                   = 0;
    PFN_vkGetImageViewHandleNVX                    vkGetImageViewHandleNVX                    = 0;
    PFN_vkGetInstanceProcAddr                      vkGetInstanceProcAddr                      = 0;
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
    PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetMemoryAndroidHardwareBufferANDROID = 0;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
    PFN_vkGetMemoryFdKHR                    vkGetMemoryFdKHR                    = 0;
    PFN_vkGetMemoryFdPropertiesKHR          vkGetMemoryFdPropertiesKHR          = 0;
    PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetMemoryWin32HandleKHR = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetMemoryWin32HandleNV = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetMemoryWin32HandlePropertiesKHR = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
    PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetMemoryZirconHandleFUCHSIA = 0;
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
    PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetMemoryZirconHandlePropertiesFUCHSIA = 0;
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    PFN_vkGetPastPresentationTimingGOOGLE                vkGetPastPresentationTimingGOOGLE                = 0;
    PFN_vkGetPerformanceParameterINTEL                   vkGetPerformanceParameterINTEL                   = 0;
    PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT   vkGetPhysicalDeviceCalibrateableTimeDomainsEXT   = 0;
    PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
    PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
    PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR      vkGetPhysicalDeviceDisplayPlaneProperties2KHR      = 0;
    PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR       vkGetPhysicalDeviceDisplayPlanePropertiesKHR       = 0;
    PFN_vkGetPhysicalDeviceDisplayProperties2KHR           vkGetPhysicalDeviceDisplayProperties2KHR           = 0;
    PFN_vkGetPhysicalDeviceDisplayPropertiesKHR            vkGetPhysicalDeviceDisplayPropertiesKHR            = 0;
    PFN_vkGetPhysicalDeviceExternalBufferProperties        vkGetPhysicalDeviceExternalBufferProperties        = 0;
    PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR     vkGetPhysicalDeviceExternalBufferPropertiesKHR     = 0;
    PFN_vkGetPhysicalDeviceExternalFenceProperties         vkGetPhysicalDeviceExternalFenceProperties         = 0;
    PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR      vkGetPhysicalDeviceExternalFencePropertiesKHR      = 0;
    PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties     vkGetPhysicalDeviceExternalSemaphoreProperties     = 0;
    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR  vkGetPhysicalDeviceExternalSemaphorePropertiesKHR  = 0;
    PFN_vkGetPhysicalDeviceFeatures                        vkGetPhysicalDeviceFeatures                        = 0;
    PFN_vkGetPhysicalDeviceFeatures2                       vkGetPhysicalDeviceFeatures2                       = 0;
    PFN_vkGetPhysicalDeviceFeatures2KHR                    vkGetPhysicalDeviceFeatures2KHR                    = 0;
    PFN_vkGetPhysicalDeviceFormatProperties                vkGetPhysicalDeviceFormatProperties                = 0;
    PFN_vkGetPhysicalDeviceFormatProperties2               vkGetPhysicalDeviceFormatProperties2               = 0;
    PFN_vkGetPhysicalDeviceFormatProperties2KHR            vkGetPhysicalDeviceFormatProperties2KHR            = 0;
    PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR         vkGetPhysicalDeviceFragmentShadingRatesKHR         = 0;
    PFN_vkGetPhysicalDeviceImageFormatProperties           vkGetPhysicalDeviceImageFormatProperties           = 0;
    PFN_vkGetPhysicalDeviceImageFormatProperties2          vkGetPhysicalDeviceImageFormatProperties2          = 0;
    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR       vkGetPhysicalDeviceImageFormatProperties2KHR       = 0;
    PFN_vkGetPhysicalDeviceMemoryProperties                vkGetPhysicalDeviceMemoryProperties                = 0;
    PFN_vkGetPhysicalDeviceMemoryProperties2               vkGetPhysicalDeviceMemoryProperties2               = 0;
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR            vkGetPhysicalDeviceMemoryProperties2KHR            = 0;
    PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT        vkGetPhysicalDeviceMultisamplePropertiesEXT        = 0;
    PFN_vkGetPhysicalDevicePresentRectanglesKHR            vkGetPhysicalDevicePresentRectanglesKHR            = 0;
    PFN_vkGetPhysicalDeviceProperties                      vkGetPhysicalDeviceProperties                      = 0;
    PFN_vkGetPhysicalDeviceProperties2                     vkGetPhysicalDeviceProperties2                     = 0;
    PFN_vkGetPhysicalDeviceProperties2KHR                  vkGetPhysicalDeviceProperties2KHR                  = 0;
    PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR
                                                     vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
    PFN_vkGetPhysicalDeviceQueueFamilyProperties     vkGetPhysicalDeviceQueueFamilyProperties                = 0;
    PFN_vkGetPhysicalDeviceQueueFamilyProperties2    vkGetPhysicalDeviceQueueFamilyProperties2               = 0;
    PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR            = 0;
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
    PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceScreenPresentationSupportQNX = 0;
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
    PFN_vkGetPhysicalDeviceSparseImageFormatProperties     vkGetPhysicalDeviceSparseImageFormatProperties     = 0;
    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2    vkGetPhysicalDeviceSparseImageFormatProperties2    = 0;
    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
    PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV
                                                   vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
    PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
    PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR  vkGetPhysicalDeviceSurfaceCapabilitiesKHR  = 0;
    PFN_vkGetPhysicalDeviceSurfaceFormats2KHR      vkGetPhysicalDeviceSurfaceFormats2KHR      = 0;
    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR       vkGetPhysicalDeviceSurfaceFormatsKHR       = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
    PFN_vkGetPhysicalDeviceSurfaceSupportKHR      vkGetPhysicalDeviceSurfaceSupportKHR      = 0;
    PFN_vkGetPhysicalDeviceToolPropertiesEXT      vkGetPhysicalDeviceToolPropertiesEXT      = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceVideoCapabilitiesKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
    PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
    PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_XLIB_KHR )
    PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
    PFN_vkGetPipelineCacheData                            vkGetPipelineCacheData                            = 0;
    PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
    PFN_vkGetPipelineExecutablePropertiesKHR              vkGetPipelineExecutablePropertiesKHR              = 0;
    PFN_vkGetPipelineExecutableStatisticsKHR              vkGetPipelineExecutableStatisticsKHR              = 0;
    PFN_vkGetPrivateDataEXT                               vkGetPrivateDataEXT                               = 0;
    PFN_vkGetQueryPoolResults                             vkGetQueryPoolResults                             = 0;
    PFN_vkGetQueueCheckpointData2NV                       vkGetQueueCheckpointData2NV                       = 0;
    PFN_vkGetQueueCheckpointDataNV                        vkGetQueueCheckpointDataNV                        = 0;
#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
    PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetRandROutputDisplayEXT = 0;
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
    PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
    PFN_vkGetRayTracingShaderGroupHandlesKHR              vkGetRayTracingShaderGroupHandlesKHR              = 0;
    PFN_vkGetRayTracingShaderGroupHandlesNV               vkGetRayTracingShaderGroupHandlesNV               = 0;
    PFN_vkGetRayTracingShaderGroupStackSizeKHR            vkGetRayTracingShaderGroupStackSizeKHR            = 0;
    PFN_vkGetRefreshCycleDurationGOOGLE                   vkGetRefreshCycleDurationGOOGLE                   = 0;
    PFN_vkGetRenderAreaGranularity                        vkGetRenderAreaGranularity                        = 0;
    PFN_vkGetSemaphoreCounterValue                        vkGetSemaphoreCounterValue                        = 0;
    PFN_vkGetSemaphoreCounterValueKHR                     vkGetSemaphoreCounterValueKHR                     = 0;
    PFN_vkGetSemaphoreFdKHR                               vkGetSemaphoreFdKHR                               = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetSemaphoreWin32HandleKHR = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
    PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetSemaphoreZirconHandleFUCHSIA = 0;
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    PFN_vkGetShaderInfoAMD          vkGetShaderInfoAMD          = 0;
    PFN_vkGetSwapchainCounterEXT    vkGetSwapchainCounterEXT    = 0;
    PFN_vkGetSwapchainImagesKHR     vkGetSwapchainImagesKHR     = 0;
    PFN_vkGetSwapchainStatusKHR     vkGetSwapchainStatusKHR     = 0;
    PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetVideoSessionMemoryRequirementsKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0;
#else
    PFN_dummy placeholder_dont_call_vkGetWinrtDisplayNV = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkImportFenceWin32HandleKHR = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkImportSemaphoreWin32HandleKHR = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
    PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0;
#else
    PFN_dummy placeholder_dont_call_vkImportSemaphoreZirconHandleFUCHSIA = 0;
#endif /*VK_USE_PLATFORM_FUCHSIA*/
    PFN_vkInitializePerformanceApiINTEL         vkInitializePerformanceApiINTEL         = 0;
    PFN_vkInvalidateMappedMemoryRanges          vkInvalidateMappedMemoryRanges          = 0;
    PFN_vkMapMemory                             vkMapMemory                             = 0;
    PFN_vkMergePipelineCaches                   vkMergePipelineCaches                   = 0;
    PFN_vkMergeValidationCachesEXT              vkMergeValidationCachesEXT              = 0;
    PFN_vkQueueBeginDebugUtilsLabelEXT          vkQueueBeginDebugUtilsLabelEXT          = 0;
    PFN_vkQueueBindSparse                       vkQueueBindSparse                       = 0;
    PFN_vkQueueEndDebugUtilsLabelEXT            vkQueueEndDebugUtilsLabelEXT            = 0;
    PFN_vkQueueInsertDebugUtilsLabelEXT         vkQueueInsertDebugUtilsLabelEXT         = 0;
    PFN_vkQueuePresentKHR                       vkQueuePresentKHR                       = 0;
    PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
    PFN_vkQueueSubmit                           vkQueueSubmit                           = 0;
    PFN_vkQueueSubmit2KHR                       vkQueueSubmit2KHR                       = 0;
    PFN_vkQueueWaitIdle                         vkQueueWaitIdle                         = 0;
    PFN_vkRegisterDeviceEventEXT                vkRegisterDeviceEventEXT                = 0;
    PFN_vkRegisterDisplayEventEXT               vkRegisterDisplayEventEXT               = 0;
    PFN_vkReleaseDisplayEXT                     vkReleaseDisplayEXT                     = 0;
#if defined( VK_USE_PLATFORM_WIN32_KHR )
    PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
#else
    PFN_dummy placeholder_dont_call_vkReleaseFullScreenExclusiveModeEXT = 0;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
    PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
    PFN_vkReleaseProfilingLockKHR              vkReleaseProfilingLockKHR              = 0;
    PFN_vkResetCommandBuffer                   vkResetCommandBuffer                   = 0;
    PFN_vkResetCommandPool                     vkResetCommandPool                     = 0;
    PFN_vkResetDescriptorPool                  vkResetDescriptorPool                  = 0;
    PFN_vkResetEvent                           vkResetEvent                           = 0;
    PFN_vkResetFences                          vkResetFences                          = 0;
    PFN_vkResetQueryPool                       vkResetQueryPool                       = 0;
    PFN_vkResetQueryPoolEXT                    vkResetQueryPoolEXT                    = 0;
    PFN_vkSetDebugUtilsObjectNameEXT           vkSetDebugUtilsObjectNameEXT           = 0;
    PFN_vkSetDebugUtilsObjectTagEXT            vkSetDebugUtilsObjectTagEXT            = 0;
    PFN_vkSetEvent                             vkSetEvent                             = 0;
    PFN_vkSetHdrMetadataEXT                    vkSetHdrMetadataEXT                    = 0;
    PFN_vkSetLocalDimmingAMD                   vkSetLocalDimmingAMD                   = 0;
    PFN_vkSetPrivateDataEXT                    vkSetPrivateDataEXT                    = 0;
    PFN_vkSignalSemaphore                      vkSignalSemaphore                      = 0;
    PFN_vkSignalSemaphoreKHR                   vkSignalSemaphoreKHR                   = 0;
    PFN_vkSubmitDebugUtilsMessageEXT           vkSubmitDebugUtilsMessageEXT           = 0;
    PFN_vkTrimCommandPool                      vkTrimCommandPool                      = 0;
    PFN_vkTrimCommandPoolKHR                   vkTrimCommandPoolKHR                   = 0;
    PFN_vkUninitializePerformanceApiINTEL      vkUninitializePerformanceApiINTEL      = 0;
    PFN_vkUnmapMemory                          vkUnmapMemory                          = 0;
    PFN_vkUpdateDescriptorSetWithTemplate      vkUpdateDescriptorSetWithTemplate      = 0;
    PFN_vkUpdateDescriptorSetWithTemplateKHR   vkUpdateDescriptorSetWithTemplateKHR   = 0;
    PFN_vkUpdateDescriptorSets                 vkUpdateDescriptorSets                 = 0;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
    PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0;
#else
    PFN_dummy placeholder_dont_call_vkUpdateVideoSessionParametersKHR = 0;
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
    PFN_vkWaitForFences                            vkWaitForFences                            = 0;
    PFN_vkWaitSemaphores                           vkWaitSemaphores                           = 0;
    PFN_vkWaitSemaphoresKHR                        vkWaitSemaphoresKHR                        = 0;
    PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0;
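    // The constructors and init() overloads below populate the function pointer table declared
    // above: every pointer starts out as 0 and is resolved at runtime through
    // vkGetInstanceProcAddr / vkGetDeviceProcAddr.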

  public:
    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT                                    = default;
    DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default;

#if !defined( VK_NO_PROTOTYPES )
    // This interface is designed to be used for per-device function pointers in combination with a linked Vulkan
    // library.
    template <typename DynamicLoader>
    void init( VULKAN_HPP_NAMESPACE::Instance const & instance,
               VULKAN_HPP_NAMESPACE::Device const &   device,
               DynamicLoader const &                  dl ) VULKAN_HPP_NOEXCEPT
    {
      PFN_vkGetInstanceProcAddr getInstanceProcAddr =
        dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
      PFN_vkGetDeviceProcAddr getDeviceProcAddr =
        dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" );
      init( static_cast<VkInstance>( instance ),
            getInstanceProcAddr,
            static_cast<VkDevice>( device ),
            device ? getDeviceProcAddr : nullptr );
    }
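    // Example (illustrative sketch, not part of the generated interface): initializing this
    // dispatcher with an explicit loader object. Here `instance` and `device` are assumed to be
    // previously created vk::Instance / vk::Device handles and `dl` any loader exposing
    // getProcAddress<T>( char const * ), e.g. VULKAN_HPP_NAMESPACE::DynamicLoader:
    //
    //   VULKAN_HPP_NAMESPACE::DynamicLoader         dl;
    //   VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic dispatcher;
    //   dispatcher.init( instance, device, dl );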

    // This interface is designed to be used for per-device function pointers in combination with a linked Vulkan
    // library.
    template <typename DynamicLoader
#  if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
              = VULKAN_HPP_NAMESPACE::DynamicLoader
#  endif
              >
    void init( VULKAN_HPP_NAMESPACE::Instance const & instance,
               VULKAN_HPP_NAMESPACE::Device const &   device ) VULKAN_HPP_NOEXCEPT
    {
      static DynamicLoader dl;
      init( instance, device, dl );
    }
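    // Example (illustrative sketch): the same initialization relying on the defaulted template
    // argument, assuming VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL is enabled and `instance` and
    // `device` are valid handles:
    //
    //   VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic dispatcher;
    //   dispatcher.init( instance, device );  // uses a function-local DynamicLoader internally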
#endif  // !defined( VK_NO_PROTOTYPES )

    DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
    {
      init( getInstanceProcAddr );
    }

    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getInstanceProcAddr );

      vkGetInstanceProcAddr = getInstanceProcAddr;
      vkCreateInstance      = PFN_vkCreateInstance( vkGetInstanceProcAddr( nullptr, "vkCreateInstance" ) );
      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties(
        vkGetInstanceProcAddr( nullptr, "vkEnumerateInstanceExtensionProperties" ) );
      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties(
        vkGetInstanceProcAddr( nullptr, "vkEnumerateInstanceLayerProperties" ) );
      vkEnumerateInstanceVersion =
        PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( nullptr, "vkEnumerateInstanceVersion" ) );
    }
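    // Example (illustrative sketch): bootstrapping only the global entry points before any
    // instance exists; `dl` is assumed to be a VULKAN_HPP_NAMESPACE::DynamicLoader (or any loader
    // with an equivalent getProcAddress):
    //
    //   auto gipa = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
    //   VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic dispatcher( gipa );
    //   // vkCreateInstance and the vkEnumerateInstance* pointers are now usable; instance- and
    //   // device-level pointers are filled in later by the init( instance, ... ) overloads.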

    // This interface does not require a linked Vulkan library.
    DispatchLoaderDynamic( VkInstance                instance,
                           PFN_vkGetInstanceProcAddr getInstanceProcAddr,
                           VkDevice                  device            = {},
                           PFN_vkGetDeviceProcAddr   getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
    {
      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
    }

    // This interface does not require a linked Vulkan library.
    void init( VkInstance                instance,
               PFN_vkGetInstanceProcAddr getInstanceProcAddr,
               VkDevice                  device              = {},
               PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
    {
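      // Note: the getDeviceProcAddr argument above is intentionally unused; init( Instance )
      // resolves all pointers (including device-level ones) through getInstanceProcAddr, and
      // init( Device ) then re-resolves the device-level ones through the vkGetDeviceProcAddr
      // obtained from the instance.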
      VULKAN_HPP_ASSERT( instance && getInstanceProcAddr );
      vkGetInstanceProcAddr = getInstanceProcAddr;
      init( VULKAN_HPP_NAMESPACE::Instance( instance ) );
      if ( device )
      {
        init( VULKAN_HPP_NAMESPACE::Device( device ) );
      }
    }
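    // Example (illustrative sketch): initialization from raw handles without linking against a
    // Vulkan library; `vkInstance`, `gipa` and `vkDevice` are assumed to have been obtained
    // elsewhere (e.g. through a manually loaded vulkan-1 / libvulkan):
    //
    //   VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic dispatcher( vkInstance, gipa, vkDevice );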

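    // init( Instance ) resolves every entry in the function pointer table through
    // vkGetInstanceProcAddr, including device-level commands (for which the loader returns a
    // dispatching trampoline); a subsequent init( Device ) re-resolves the device-level entries
    // through vkGetDeviceProcAddr to avoid that extra dispatch.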
    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
    {
      VkInstance instance = static_cast<VkInstance>( instanceCpp );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkAcquireWinrtDisplayNV =
        PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
      vkAcquireXlibDisplayEXT =
        PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
      vkCreateAndroidSurfaceKHR =
        PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      vkCreateDebugReportCallbackEXT =
        PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
      vkCreateDebugUtilsMessengerEXT =
        PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
      vkCreateDirectFBSurfaceEXT =
        PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
      vkCreateDisplayModeKHR =
        PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
      vkCreateDisplayPlaneSurfaceKHR =
        PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
      vkCreateHeadlessSurfaceEXT =
        PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
#if defined( VK_USE_PLATFORM_IOS_MVK )
      vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
      vkCreateImagePipeSurfaceFUCHSIA =
        PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_MACOS_MVK )
      vkCreateMacOSSurfaceMVK =
        PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
      vkCreateMetalSurfaceEXT =
        PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
#endif /*VK_USE_PLATFORM_METAL_EXT*/
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
      vkCreateScreenSurfaceQNX =
        PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
#if defined( VK_USE_PLATFORM_GGP )
      vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP(
        vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
#endif /*VK_USE_PLATFORM_GGP*/
#if defined( VK_USE_PLATFORM_VI_NN )
      vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
#endif /*VK_USE_PLATFORM_VI_NN*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
      vkCreateWaylandSurfaceKHR =
        PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkCreateWin32SurfaceKHR =
        PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
      vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_XLIB_KHR )
      vkCreateXlibSurfaceKHR =
        PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
      vkDebugReportMessageEXT =
        PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
      vkDestroyDebugReportCallbackEXT =
        PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
      vkDestroyDebugUtilsMessengerEXT =
        PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
      vkDestroyInstance   = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
      vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
      vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties(
        vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
      vkEnumerateDeviceLayerProperties =
        PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
      vkEnumeratePhysicalDeviceGroups =
        PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
      vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR(
        vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
      if ( !vkEnumeratePhysicalDeviceGroups )
        vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
      vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR =
        PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
      vkEnumeratePhysicalDevices =
        PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
      vkGetDisplayModeProperties2KHR =
        PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
      vkGetDisplayModePropertiesKHR =
        PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
      vkGetDisplayPlaneCapabilities2KHR =
        PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
      vkGetDisplayPlaneCapabilitiesKHR =
        PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
      vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR(
        vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
      vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr( vkGetInstanceProcAddr( instance, "vkGetInstanceProcAddr" ) );
      vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
      vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
      vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
      vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
      vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
      vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
      vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
      vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
      vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
      if ( !vkGetPhysicalDeviceExternalBufferProperties )
        vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
      vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
      vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
      if ( !vkGetPhysicalDeviceExternalFenceProperties )
        vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
      vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
      vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
      vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
      if ( !vkGetPhysicalDeviceExternalSemaphoreProperties )
        vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
      vkGetPhysicalDeviceFeatures =
        PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
      vkGetPhysicalDeviceFeatures2 =
        PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
      vkGetPhysicalDeviceFeatures2KHR =
        PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
      if ( !vkGetPhysicalDeviceFeatures2 )
        vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
      vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
      vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
      vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
      if ( !vkGetPhysicalDeviceFormatProperties2 )
        vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
      vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
      vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
      vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
      vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
      if ( !vkGetPhysicalDeviceImageFormatProperties2 )
        vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
      vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
      vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
      vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
      if ( !vkGetPhysicalDeviceMemoryProperties2 )
        vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
      vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
      vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
      vkGetPhysicalDeviceProperties =
        PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
      vkGetPhysicalDeviceProperties2 =
        PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
      vkGetPhysicalDeviceProperties2KHR =
        PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
      if ( !vkGetPhysicalDeviceProperties2 )
        vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
      vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR =
        PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
          vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
      vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
      vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
      vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
      if ( !vkGetPhysicalDeviceQueueFamilyProperties2 )
        vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
      vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
      vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
      vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
      vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
      if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 )
        vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
      vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV =
        PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
      vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
      vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
      vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
      vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
      vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
      vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
      vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
      vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
      vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_XLIB_KHR )
      vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR(
        vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
      vkGetRandROutputDisplayEXT =
        PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
      vkSubmitDebugUtilsMessageEXT =
        PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT(
        vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkAcquireNextImage2KHR =
        PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL(
        vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
      vkAcquireProfilingLockKHR =
        PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
      vkAllocateCommandBuffers =
        PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
      vkAllocateDescriptorSets =
        PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
      vkAllocateMemory     = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV(
        vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
      vkBindBufferMemory  = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
      vkBindBufferMemory2KHR =
        PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
      if ( !vkBindBufferMemory2 )
        vkBindBufferMemory2 = vkBindBufferMemory2KHR;
      vkBindImageMemory     = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
      vkBindImageMemory2    = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
      if ( !vkBindImageMemory2 )
        vkBindImageMemory2 = vkBindImageMemory2KHR;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkBindVideoSessionMemoryKHR =
        PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkBuildAccelerationStructuresKHR =
        PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) );
      vkCmdBeginConditionalRenderingEXT =
        PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
      vkCmdBeginDebugUtilsLabelEXT =
        PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
      vkCmdBeginQueryIndexedEXT =
        PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
      vkCmdBeginRenderPass  = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
      vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
      vkCmdBeginRenderPass2KHR =
        PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
      if ( !vkCmdBeginRenderPass2 )
        vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
      vkCmdBeginTransformFeedbackEXT =
        PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdBeginVideoCodingKHR =
        PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdBindDescriptorSets =
        PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
      vkCmdBindPipeline    = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
      vkCmdBindPipelineShaderGroupNV =
        PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
      vkCmdBindShadingRateImageNV =
        PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT(
        vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
      vkCmdBindVertexBuffers =
        PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
      vkCmdBindVertexBuffers2EXT =
        PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
      vkCmdBlitImage     = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
      vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) );
      vkCmdBuildAccelerationStructureNV =
        PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
      vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR(
        vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
      vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR(
        vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) );
      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
      vkCmdClearColorImage  = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
      vkCmdClearDepthStencilImage =
        PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdControlVideoCodingKHR =
        PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdCopyAccelerationStructureKHR =
        PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) );
      vkCmdCopyAccelerationStructureNV =
        PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
      vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR(
        vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
      vkCmdCopyBuffer     = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
      vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) );
      vkCmdCopyBufferToImage =
        PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
      vkCmdCopyBufferToImage2KHR =
        PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
      vkCmdCopyImage     = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
      vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) );
      vkCmdCopyImageToBuffer =
        PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
      vkCmdCopyImageToBuffer2KHR =
        PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
      vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR(
        vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
      vkCmdCopyQueryPoolResults =
        PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
      vkCmdCuLaunchKernelNVX =
        PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) );
      vkCmdDebugMarkerBeginEXT =
        PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
      vkCmdDebugMarkerEndEXT =
        PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
      vkCmdDebugMarkerInsertEXT =
        PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdDispatch        = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
      vkCmdDispatchBase    = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) );
      if ( !vkCmdDispatchBase )
        vkCmdDispatchBase = vkCmdDispatchBaseKHR;
      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
      vkCmdDraw             = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
      vkCmdDrawIndexed      = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
      vkCmdDrawIndexedIndirect =
        PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
      vkCmdDrawIndexedIndirectCount =
        PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
      vkCmdDrawIndexedIndirectCountAMD =
        PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
      if ( !vkCmdDrawIndexedIndirectCount )
        vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
      vkCmdDrawIndexedIndirectCountKHR =
        PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
      if ( !vkCmdDrawIndexedIndirectCount )
        vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
      vkCmdDrawIndirectByteCountEXT =
        PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
      vkCmdDrawIndirectCount =
        PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
      vkCmdDrawIndirectCountAMD =
        PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
      if ( !vkCmdDrawIndirectCount )
        vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
      vkCmdDrawIndirectCountKHR =
        PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
      if ( !vkCmdDrawIndirectCount )
        vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
      vkCmdDrawMeshTasksIndirectCountNV =
        PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
      vkCmdDrawMeshTasksIndirectNV =
        PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdEndConditionalRenderingEXT =
        PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
      vkCmdEndDebugUtilsLabelEXT =
        PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
      vkCmdEndQueryIndexedEXT =
        PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
      vkCmdEndRenderPass  = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
      vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
      vkCmdEndRenderPass2KHR =
        PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
      if ( !vkCmdEndRenderPass2 )
        vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
      vkCmdEndTransformFeedbackEXT =
        PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdEndVideoCodingKHR =
        PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
      vkCmdExecuteGeneratedCommandsNV =
        PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
      vkCmdInsertDebugUtilsLabelEXT =
        PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
      vkCmdNextSubpass     = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
      vkCmdNextSubpass2    = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
      if ( !vkCmdNextSubpass2 )
        vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
      vkCmdPipelineBarrier2KHR =
        PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) );
      vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV(
        vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
      vkCmdPushDescriptorSetKHR =
        PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(
        vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
      vkCmdResetEvent       = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
      vkCmdResetEvent2KHR   = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) );
      vkCmdResetQueryPool   = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
      vkCmdResolveImage     = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
      vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
      vkCmdSetBlendConstants =
        PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
      vkCmdSetCoarseSampleOrderNV =
        PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
      vkCmdSetColorWriteEnableEXT =
        PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) );
      vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) );
      vkCmdSetDepthBias   = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
      vkCmdSetDepthBiasEnableEXT =
        PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) );
      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
      vkCmdSetDepthBoundsTestEnableEXT =
        PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
      vkCmdSetDepthCompareOpEXT =
        PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
      vkCmdSetDepthTestEnableEXT =
        PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
      vkCmdSetDepthWriteEnableEXT =
        PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
      vkCmdSetDeviceMask    = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
      if ( !vkCmdSetDeviceMask )
        vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
      vkCmdSetDiscardRectangleEXT =
        PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
      vkCmdSetEvent     = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
      vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) );
      vkCmdSetExclusiveScissorNV =
        PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
      vkCmdSetFragmentShadingRateEnumNV =
        PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
      vkCmdSetFragmentShadingRateKHR =
        PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
      vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) );
      vkCmdSetLineStippleEXT =
        PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
      vkCmdSetLineWidth  = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
      vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) );
      vkCmdSetPatchControlPointsEXT =
        PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) );
      vkCmdSetPerformanceMarkerINTEL =
        PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
      vkCmdSetPerformanceOverrideINTEL =
        PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL(
        vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
      vkCmdSetPrimitiveRestartEnableEXT =
        PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) );
      vkCmdSetPrimitiveTopologyEXT =
        PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
      vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT(
        vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) );
      vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR(
        vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
      vkCmdSetSampleLocationsEXT =
        PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
      vkCmdSetScissorWithCountEXT =
        PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
      vkCmdSetStencilCompareMask =
        PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
      vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) );
      vkCmdSetStencilReference =
        PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
      vkCmdSetStencilTestEnableEXT =
        PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
      vkCmdSetStencilWriteMask =
        PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
      vkCmdSetVertexInputEXT =
        PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) );
      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV(
        vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
      vkCmdSetViewportWScalingNV =
        PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
      vkCmdSetViewportWithCountEXT =
        PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
      vkCmdTraceRaysIndirectKHR =
        PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) );
      vkCmdTraceRaysKHR   = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) );
      vkCmdTraceRaysNV    = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
      vkCmdUpdateBuffer   = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
      vkCmdWaitEvents     = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) );
      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV(
        vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
      vkCmdWriteBufferMarker2AMD =
        PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) );
      vkCmdWriteBufferMarkerAMD =
        PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
      vkCmdWriteTimestamp2KHR =
        PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) );
      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
      vkCopyAccelerationStructureKHR =
        PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR(
        vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR(
        vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
      vkCreateAccelerationStructureKHR =
        PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
      vkCreateAccelerationStructureNV =
        PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
      vkCreateBuffer      = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
      vkCreateBufferView  = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
      vkCreateComputePipelines =
        PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) );
      vkCreateCuModuleNVX   = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) );
      vkCreateDeferredOperationKHR =
        PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
      vkCreateDescriptorPool =
        PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
      vkCreateDescriptorSetLayout =
        PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
      vkCreateDescriptorUpdateTemplate =
        PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR(
        vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
      if ( !vkCreateDescriptorUpdateTemplate )
        vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
      vkCreateEvent       = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
      vkCreateFence       = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
      vkCreateGraphicsPipelines =
        PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
      vkCreateImage     = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
      vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
      vkCreateIndirectCommandsLayoutNV =
        PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
      vkCreatePipelineLayout =
        PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
      vkCreatePrivateDataSlotEXT =
        PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
      vkCreateRayTracingPipelinesKHR =
        PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
      vkCreateRayTracingPipelinesNV =
        PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
      vkCreateRenderPass  = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
      vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
      vkCreateRenderPass2KHR =
        PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
      if ( !vkCreateRenderPass2 )
        vkCreateRenderPass2 = vkCreateRenderPass2KHR;
      vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
      vkCreateSamplerYcbcrConversion =
        PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
      vkCreateSamplerYcbcrConversionKHR =
        PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
      if ( !vkCreateSamplerYcbcrConversion )
        vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
      vkCreateSemaphore    = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
      vkCreateSharedSwapchainsKHR =
        PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
      vkCreateValidationCacheEXT =
        PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
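      // The video-coding entry points below are provisional and are only loaded when
      // VK_ENABLE_BETA_EXTENSIONS is defined.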
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCreateVideoSessionKHR =
        PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCreateVideoSessionParametersKHR =
        PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkDebugMarkerSetObjectNameEXT =
        PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
      vkDebugMarkerSetObjectTagEXT =
        PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
      vkDeferredOperationJoinKHR =
        PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
      vkDestroyAccelerationStructureKHR =
        PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
      vkDestroyAccelerationStructureNV =
        PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
      vkDestroyBuffer      = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
      vkDestroyBufferView  = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
      vkDestroyCuFunctionNVX =
        PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) );
      vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) );
      vkDestroyDeferredOperationKHR =
        PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
      vkDestroyDescriptorPool =
        PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
      vkDestroyDescriptorSetLayout =
        PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
      vkDestroyDescriptorUpdateTemplate =
        PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(
        vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
      if ( !vkDestroyDescriptorUpdateTemplate )
        vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
      vkDestroyDevice      = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
      vkDestroyEvent       = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
      vkDestroyFence       = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
      vkDestroyImage       = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
      vkDestroyImageView   = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
      vkDestroyIndirectCommandsLayoutNV =
        PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
      vkDestroyPipelineCache =
        PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
      vkDestroyPipelineLayout =
        PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
      vkDestroyPrivateDataSlotEXT =
        PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
      vkDestroyQueryPool  = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
      vkDestroySampler    = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
      vkDestroySamplerYcbcrConversion =
        PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR(
        vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
      if ( !vkDestroySamplerYcbcrConversion )
        vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
      vkDestroySemaphore    = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
      vkDestroyValidationCacheEXT =
        PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkDestroyVideoSessionKHR =
        PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR(
        vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
      vkDisplayPowerControlEXT =
        PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
      vkFlushMappedMemoryRanges =
        PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
      vkFreeMemory         = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
      vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR(
        vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) );
      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR(
        vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV(
        vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV(
        vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(
        vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      vkGetBufferDeviceAddress =
        PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
      vkGetBufferDeviceAddressEXT =
        PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
      if ( !vkGetBufferDeviceAddress )
        vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
      vkGetBufferDeviceAddressKHR =
        PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
      if ( !vkGetBufferDeviceAddress )
        vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
      vkGetBufferMemoryRequirements =
        PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
      vkGetBufferMemoryRequirements2 =
        PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
      vkGetBufferMemoryRequirements2KHR =
        PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
      if ( !vkGetBufferMemoryRequirements2 )
        vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
      vkGetBufferOpaqueCaptureAddress =
        PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR(
        vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
      if ( !vkGetBufferOpaqueCaptureAddress )
        vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
      vkGetCalibratedTimestampsEXT =
        PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR(
        vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
      vkGetDeferredOperationResultKHR =
        PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
      vkGetDescriptorSetLayoutSupport =
        PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR(
        vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
      if ( !vkGetDescriptorSetLayoutSupport )
        vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR(
        vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures(
        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(
        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
      if ( !vkGetDeviceGroupPeerMemoryFeatures )
        vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT(
        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(
        vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
      vkGetDeviceMemoryCommitment =
        PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress(
        vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR(
        vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
      if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
        vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
      vkGetDeviceQueue    = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
      vkGetDeviceQueue2   = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
      vkGetEventStatus    = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
      vkGetFenceFdKHR     = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
      vkGetFenceStatus    = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetFenceWin32HandleKHR =
        PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV(
        vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT(
        vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
      vkGetImageMemoryRequirements =
        PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
      vkGetImageMemoryRequirements2 =
        PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
      vkGetImageMemoryRequirements2KHR =
        PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
      if ( !vkGetImageMemoryRequirements2 )
        vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements(
        vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2(
        vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(
        vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
      if ( !vkGetImageSparseMemoryRequirements2 )
        vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
      vkGetImageSubresourceLayout =
        PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
      vkGetImageViewAddressNVX =
        PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
      vkGetImageViewHandleNVX =
        PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(
        vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
      vkGetMemoryFdPropertiesKHR =
        PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT(
        vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetMemoryWin32HandleKHR =
        PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetMemoryWin32HandleNV =
        PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
      vkGetMemoryZirconHandleFUCHSIA =
        PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
      vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA(
        vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      vkGetPastPresentationTimingGOOGLE =
        PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
      vkGetPerformanceParameterINTEL =
        PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
      vkGetPipelineCacheData =
        PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR(
        vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR(
        vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
      vkGetPrivateDataEXT   = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
      vkGetQueueCheckpointData2NV =
        PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) );
      vkGetQueueCheckpointDataNV =
        PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
        vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR(
        vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV(
        vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
      if ( !vkGetRayTracingShaderGroupHandlesKHR )
        vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
      vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR(
        vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
      vkGetRefreshCycleDurationGOOGLE =
        PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
      vkGetRenderAreaGranularity =
        PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
      vkGetSemaphoreCounterValue =
        PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
      vkGetSemaphoreCounterValueKHR =
        PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
      if ( !vkGetSemaphoreCounterValue )
        vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetSemaphoreWin32HandleKHR =
        PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
      vkGetSemaphoreZirconHandleFUCHSIA =
        PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) );
      vkGetSwapchainCounterEXT =
        PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
      vkGetSwapchainImagesKHR =
        PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
      vkGetSwapchainStatusKHR =
        PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
      vkGetValidationCacheDataEXT =
        PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR(
        vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkImportFenceWin32HandleKHR =
        PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkImportSemaphoreFdKHR =
        PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkImportSemaphoreWin32HandleKHR =
        PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
      vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA(
        vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      vkInitializePerformanceApiINTEL =
        PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
      vkInvalidateMappedMemoryRanges =
        PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
      vkMapMemory           = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
      vkMergeValidationCachesEXT =
        PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
      vkQueueBeginDebugUtilsLabelEXT =
        PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
      vkQueueEndDebugUtilsLabelEXT =
        PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
      vkQueueInsertDebugUtilsLabelEXT =
        PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL(
        vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
      vkQueueSubmit     = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
      vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) );
      vkQueueWaitIdle   = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
      vkRegisterDeviceEventEXT =
        PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
      vkRegisterDisplayEventEXT =
        PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT(
        vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL(
        vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
      vkReleaseProfilingLockKHR =
        PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
      vkResetCommandBuffer  = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
      vkResetCommandPool    = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
      vkResetEvent          = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
      vkResetFences         = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
      vkResetQueryPool      = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
      vkResetQueryPoolEXT   = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
      if ( !vkResetQueryPool )
        vkResetQueryPool = vkResetQueryPoolEXT;
      vkSetDebugUtilsObjectNameEXT =
        PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
      vkSetDebugUtilsObjectTagEXT =
        PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
      vkSetEvent           = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
      vkSetHdrMetadataEXT  = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
      vkSetPrivateDataEXT  = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) );
      vkSignalSemaphore    = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
      if ( !vkSignalSemaphore )
        vkSignalSemaphore = vkSignalSemaphoreKHR;
      vkTrimCommandPool    = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
      if ( !vkTrimCommandPool )
        vkTrimCommandPool = vkTrimCommandPoolKHR;
      vkUninitializePerformanceApiINTEL =
        PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
      vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
      vkUpdateDescriptorSetWithTemplate =
        PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR(
        vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
      if ( !vkUpdateDescriptorSetWithTemplate )
        vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
      vkUpdateDescriptorSets =
        PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkUpdateVideoSessionParametersKHR =
        PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkWaitForFences     = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
      vkWaitSemaphores    = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
      if ( !vkWaitSemaphores )
        vkWaitSemaphores = vkWaitSemaphoresKHR;
      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR(
        vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) );
    }

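    // Re-resolves every device-level entry point through vkGetDeviceProcAddr so that calls dispatch
    // directly to the given device, bypassing the instance-level trampoline used by the overload
    // above. This assumes the dispatcher was previously initialized from an instance, since it
    // relies on the vkGetDeviceProcAddr pointer loaded there. Usage sketch (hypothetical handles):
    //   dispatcher.init( instance );   // instance-level init, loads vkGetDeviceProcAddr
    //   dispatcher.init( device );     // then upgrade to device-local function pointers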
    void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
    {
      VkDevice device = static_cast<VkDevice>( deviceCpp );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkAcquireFullScreenExclusiveModeEXT =
        PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
      vkAcquireNextImageKHR  = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL(
        vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
      vkAcquireProfilingLockKHR =
        PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
      vkAllocateCommandBuffers =
        PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
      vkAllocateDescriptorSets =
        PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
      vkAllocateMemory     = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
      vkBindAccelerationStructureMemoryNV =
        PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
      vkBindBufferMemory     = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
      vkBindBufferMemory2    = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
      if ( !vkBindBufferMemory2 )
        vkBindBufferMemory2 = vkBindBufferMemory2KHR;
      vkBindImageMemory     = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
      vkBindImageMemory2    = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
      if ( !vkBindImageMemory2 )
        vkBindImageMemory2 = vkBindImageMemory2KHR;
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkBindVideoSessionMemoryKHR =
        PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkBuildAccelerationStructuresKHR =
        PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
      vkCmdBeginConditionalRenderingEXT =
        PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
      vkCmdBeginDebugUtilsLabelEXT =
        PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
      vkCmdBeginQueryIndexedEXT =
        PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
      vkCmdBeginRenderPass  = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
      vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
      vkCmdBeginRenderPass2KHR =
        PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
      if ( !vkCmdBeginRenderPass2 )
        vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
      vkCmdBeginTransformFeedbackEXT =
        PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdBeginVideoCodingKHR =
        PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
      vkCmdBindIndexBuffer    = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
      vkCmdBindPipeline       = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
      vkCmdBindPipelineShaderGroupNV =
        PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
      vkCmdBindShadingRateImageNV =
        PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT(
        vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
      vkCmdBindVertexBuffers2EXT =
        PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
      vkCmdBlitImage     = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
      vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
      vkCmdBuildAccelerationStructureNV =
        PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
      vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR(
        vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
      vkCmdBuildAccelerationStructuresKHR =
        PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
      vkCmdClearColorImage  = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
      vkCmdClearDepthStencilImage =
        PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdControlVideoCodingKHR =
        PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdCopyAccelerationStructureKHR =
        PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
      vkCmdCopyAccelerationStructureNV =
        PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
      vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR(
        vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
      vkCmdCopyBuffer        = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
      vkCmdCopyBuffer2KHR    = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
      vkCmdCopyBufferToImage2KHR =
        PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
      vkCmdCopyImage         = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
      vkCmdCopyImage2KHR     = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
      vkCmdCopyImageToBuffer2KHR =
        PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
      vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR(
        vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
      vkCmdCopyQueryPoolResults =
        PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
      vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
      vkCmdDebugMarkerBeginEXT =
        PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
      vkCmdDebugMarkerInsertEXT =
        PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdDispatch        = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
      vkCmdDispatchBase    = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
      if ( !vkCmdDispatchBase )
        vkCmdDispatchBase = vkCmdDispatchBaseKHR;
      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
      vkCmdDraw             = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
      vkCmdDrawIndexed      = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
      vkCmdDrawIndexedIndirect =
        PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
      vkCmdDrawIndexedIndirectCount =
        PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
      vkCmdDrawIndexedIndirectCountAMD =
        PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
      if ( !vkCmdDrawIndexedIndirectCount )
        vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
      vkCmdDrawIndexedIndirectCountKHR =
        PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
      if ( !vkCmdDrawIndexedIndirectCount )
        vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
      vkCmdDrawIndirectByteCountEXT =
        PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
      vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
      vkCmdDrawIndirectCountAMD =
        PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
      if ( !vkCmdDrawIndirectCount )
        vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
      vkCmdDrawIndirectCountKHR =
        PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
      if ( !vkCmdDrawIndirectCount )
        vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
      vkCmdDrawMeshTasksIndirectCountNV =
        PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
      vkCmdDrawMeshTasksIndirectNV =
        PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdEndConditionalRenderingEXT =
        PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
      vkCmdEndDebugUtilsLabelEXT =
        PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
      vkCmdEndQuery           = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
      vkCmdEndRenderPass      = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
      vkCmdEndRenderPass2     = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
      vkCmdEndRenderPass2KHR  = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
      if ( !vkCmdEndRenderPass2 )
        vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
      vkCmdEndTransformFeedbackEXT =
        PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
      vkCmdExecuteGeneratedCommandsNV =
        PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
      vkCmdInsertDebugUtilsLabelEXT =
        PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
      vkCmdNextSubpass     = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
      vkCmdNextSubpass2    = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
      if ( !vkCmdNextSubpass2 )
        vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
      vkCmdPipelineBarrier2KHR =
        PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
      vkCmdPreprocessGeneratedCommandsNV =
        PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
      vkCmdPushDescriptorSetKHR =
        PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR(
        vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
      vkCmdResetEvent        = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
      vkCmdResetEvent2KHR    = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) );
      vkCmdResetQueryPool    = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
      vkCmdResolveImage      = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
      vkCmdResolveImage2KHR  = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
      vkCmdSetCheckpointNV   = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
      vkCmdSetCoarseSampleOrderNV =
        PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
      vkCmdSetColorWriteEnableEXT =
        PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
      vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
      vkCmdSetDepthBias   = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
      vkCmdSetDepthBiasEnableEXT =
        PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
      vkCmdSetDepthBoundsTestEnableEXT =
        PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
      vkCmdSetDepthCompareOpEXT =
        PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
      vkCmdSetDepthTestEnableEXT =
        PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
      vkCmdSetDepthWriteEnableEXT =
        PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
      vkCmdSetDeviceMask    = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
      if ( !vkCmdSetDeviceMask )
        vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
      vkCmdSetDiscardRectangleEXT =
        PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
      vkCmdSetEvent     = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
      vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
      vkCmdSetExclusiveScissorNV =
        PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
      vkCmdSetFragmentShadingRateEnumNV =
        PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
      vkCmdSetFragmentShadingRateKHR =
        PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
      vkCmdSetFrontFaceEXT   = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
      vkCmdSetLineWidth      = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
      vkCmdSetLogicOpEXT     = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
      vkCmdSetPatchControlPointsEXT =
        PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
      vkCmdSetPerformanceMarkerINTEL =
        PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
      vkCmdSetPerformanceOverrideINTEL =
        PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL(
        vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
      vkCmdSetPrimitiveRestartEnableEXT =
        PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
      vkCmdSetPrimitiveTopologyEXT =
        PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
      vkCmdSetRasterizerDiscardEnableEXT =
        PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
      vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR(
        vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
      vkCmdSetSampleLocationsEXT =
        PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
      vkCmdSetScissorWithCountEXT =
        PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
      vkCmdSetStencilCompareMask =
        PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
      vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
      vkCmdSetStencilReference =
        PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
      vkCmdSetStencilTestEnableEXT =
        PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
      vkCmdSetStencilWriteMask =
        PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
      vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) );
      vkCmdSetViewport       = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV(
        vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
      vkCmdSetViewportWScalingNV =
        PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
      vkCmdSetViewportWithCountEXT =
        PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
      vkCmdTraceRaysIndirectKHR =
        PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
      vkCmdTraceRaysKHR   = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
      vkCmdTraceRaysNV    = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
      vkCmdUpdateBuffer   = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
      vkCmdWaitEvents     = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR(
        vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV(
        vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
      vkCmdWriteBufferMarker2AMD =
        PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
      vkCmdWriteBufferMarkerAMD =
        PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
      vkCmdWriteTimestamp     = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
      vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
      vkCompileDeferredNV     = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
      vkCopyAccelerationStructureKHR =
        PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR(
        vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR(
        vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
      vkCreateAccelerationStructureKHR =
        PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
      vkCreateAccelerationStructureNV =
        PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
      vkCreateBuffer      = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
      vkCreateBufferView  = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
      vkCreateComputePipelines =
        PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
      vkCreateCuModuleNVX   = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
      vkCreateDeferredOperationKHR =
        PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
      vkCreateDescriptorSetLayout =
        PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
      vkCreateDescriptorUpdateTemplate =
        PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
      vkCreateDescriptorUpdateTemplateKHR =
        PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
      if ( !vkCreateDescriptorUpdateTemplate )
        vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
      vkCreateEvent       = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
      vkCreateFence       = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
      vkCreateGraphicsPipelines =
        PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
      vkCreateImage     = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
      vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
      vkCreateIndirectCommandsLayoutNV =
        PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
      vkCreatePipelineCache  = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
      vkCreatePrivateDataSlotEXT =
        PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
      vkCreateRayTracingPipelinesKHR =
        PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
      vkCreateRayTracingPipelinesNV =
        PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
      vkCreateRenderPass     = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
      vkCreateRenderPass2    = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
      if ( !vkCreateRenderPass2 )
        vkCreateRenderPass2 = vkCreateRenderPass2KHR;
      vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
      vkCreateSamplerYcbcrConversion =
        PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
      vkCreateSamplerYcbcrConversionKHR =
        PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
      if ( !vkCreateSamplerYcbcrConversion )
        vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
      vkCreateSemaphore    = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
      vkCreateSharedSwapchainsKHR =
        PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
      vkCreateValidationCacheEXT =
        PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkCreateVideoSessionParametersKHR =
        PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkDebugMarkerSetObjectNameEXT =
        PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
      vkDebugMarkerSetObjectTagEXT =
        PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
      vkDeferredOperationJoinKHR =
        PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
      vkDestroyAccelerationStructureKHR =
        PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
      vkDestroyAccelerationStructureNV =
        PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
      vkDestroyBuffer        = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
      vkDestroyBufferView    = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
      vkDestroyCommandPool   = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
      vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
      vkDestroyCuModuleNVX   = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
      vkDestroyDeferredOperationKHR =
        PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
      vkDestroyDescriptorSetLayout =
        PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
      vkDestroyDescriptorUpdateTemplate =
        PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR(
        vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
      if ( !vkDestroyDescriptorUpdateTemplate )
        vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
      vkDestroyDevice      = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
      vkDestroyEvent       = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
      vkDestroyFence       = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
      vkDestroyImage       = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
      vkDestroyImageView   = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
      vkDestroyIndirectCommandsLayoutNV =
        PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
      vkDestroyPipeline       = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
      vkDestroyPipelineCache  = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
      vkDestroyPrivateDataSlotEXT =
        PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
      vkDestroyQueryPool  = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
      vkDestroySampler    = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
      vkDestroySamplerYcbcrConversion =
        PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
      vkDestroySamplerYcbcrConversionKHR =
        PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
      if ( !vkDestroySamplerYcbcrConversion )
        vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
      vkDestroySemaphore    = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
      vkDestroyValidationCacheEXT =
        PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkDestroyVideoSessionKHR =
        PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkDestroyVideoSessionParametersKHR =
        PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
      vkDisplayPowerControlEXT =
        PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
      vkFlushMappedMemoryRanges =
        PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
      vkFreeMemory         = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
      vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR(
        vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR(
        vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
      vkGetAccelerationStructureHandleNV =
        PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV(
        vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID(
        vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      vkGetBufferDeviceAddress =
        PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
      vkGetBufferDeviceAddressEXT =
        PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
      if ( !vkGetBufferDeviceAddress )
        vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
      vkGetBufferDeviceAddressKHR =
        PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
      if ( !vkGetBufferDeviceAddress )
        vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
      vkGetBufferMemoryRequirements =
        PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
      vkGetBufferMemoryRequirements2 =
        PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
      vkGetBufferMemoryRequirements2KHR =
        PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
      if ( !vkGetBufferMemoryRequirements2 )
        vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
      vkGetBufferOpaqueCaptureAddress =
        PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
      vkGetBufferOpaqueCaptureAddressKHR =
        PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
      if ( !vkGetBufferOpaqueCaptureAddress )
        vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
      vkGetCalibratedTimestampsEXT =
        PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR(
        vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
      vkGetDeferredOperationResultKHR =
        PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
      vkGetDescriptorSetLayoutSupport =
        PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
      vkGetDescriptorSetLayoutSupportKHR =
        PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
      if ( !vkGetDescriptorSetLayoutSupport )
        vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR(
        vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
      vkGetDeviceGroupPeerMemoryFeatures =
        PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR(
        vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
      if ( !vkGetDeviceGroupPeerMemoryFeatures )
        vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR(
        vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT(
        vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR(
        vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
      vkGetDeviceMemoryCommitment =
        PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress(
        vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR(
        vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
      if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
        vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
      vkGetDeviceQueue    = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
      vkGetDeviceQueue2   = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
      vkGetEventStatus    = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
      vkGetFenceFdKHR     = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
      vkGetFenceStatus    = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetFenceWin32HandleKHR =
        PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV(
        vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT(
        vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
      vkGetImageMemoryRequirements =
        PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
      vkGetImageMemoryRequirements2 =
        PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
      vkGetImageMemoryRequirements2KHR =
        PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
      if ( !vkGetImageMemoryRequirements2 )
        vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
      vkGetImageSparseMemoryRequirements =
        PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
      vkGetImageSparseMemoryRequirements2 =
        PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR(
        vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
      if ( !vkGetImageSparseMemoryRequirements2 )
        vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
      vkGetImageSubresourceLayout =
        PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
      vkGetImageViewAddressNVX =
        PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID(
        vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
      vkGetMemoryFdPropertiesKHR =
        PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
      vkGetMemoryHostPointerPropertiesEXT =
        PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetMemoryWin32HandleKHR =
        PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetMemoryWin32HandleNV =
        PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetMemoryWin32HandlePropertiesKHR =
        PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
      vkGetMemoryZirconHandleFUCHSIA =
        PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
      vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA(
        vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      vkGetPastPresentationTimingGOOGLE =
        PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
      vkGetPerformanceParameterINTEL =
        PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR(
        vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR(
        vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR(
        vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
      vkGetPrivateDataEXT   = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
      vkGetQueueCheckpointData2NV =
        PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
      vkGetQueueCheckpointDataNV =
        PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
        vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR(
        vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
      vkGetRayTracingShaderGroupHandlesNV =
        PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
      if ( !vkGetRayTracingShaderGroupHandlesKHR )
        vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
      vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR(
        vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
      vkGetRefreshCycleDurationGOOGLE =
        PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
      vkGetRenderAreaGranularity =
        PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
      vkGetSemaphoreCounterValue =
        PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
      vkGetSemaphoreCounterValueKHR =
        PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
      if ( !vkGetSemaphoreCounterValue )
        vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkGetSemaphoreWin32HandleKHR =
        PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
      vkGetSemaphoreZirconHandleFUCHSIA =
        PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
      vkGetSwapchainCounterEXT =
        PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
      vkGetValidationCacheDataEXT =
        PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR(
        vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkImportFenceWin32HandleKHR =
        PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkImportSemaphoreWin32HandleKHR =
        PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_FUCHSIA )
      vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA(
        vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
#endif /*VK_USE_PLATFORM_FUCHSIA*/
      vkInitializePerformanceApiINTEL =
        PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
      vkInvalidateMappedMemoryRanges =
        PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
      vkMapMemory           = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
      vkMergeValidationCachesEXT =
        PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
      vkQueueBeginDebugUtilsLabelEXT =
        PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
      vkQueueEndDebugUtilsLabelEXT =
        PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
      vkQueueInsertDebugUtilsLabelEXT =
        PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL(
        vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
      vkQueueSubmit     = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
      vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
      vkQueueWaitIdle   = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
      vkRegisterDeviceEventEXT =
        PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
      vkRegisterDisplayEventEXT =
        PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
#if defined( VK_USE_PLATFORM_WIN32_KHR )
      vkReleaseFullScreenExclusiveModeEXT =
        PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL(
        vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
      vkReleaseProfilingLockKHR =
        PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
      vkResetCommandBuffer  = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
      vkResetCommandPool    = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
      vkResetEvent          = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
      vkResetFences         = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
      vkResetQueryPool      = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
      vkResetQueryPoolEXT   = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
      if ( !vkResetQueryPool )
        vkResetQueryPool = vkResetQueryPoolEXT;
      vkSetDebugUtilsObjectNameEXT =
        PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
      vkSetDebugUtilsObjectTagEXT =
        PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
      vkSetEvent           = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
      vkSetHdrMetadataEXT  = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
      vkSetPrivateDataEXT  = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
      vkSignalSemaphore    = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
      if ( !vkSignalSemaphore )
        vkSignalSemaphore = vkSignalSemaphoreKHR;
      vkTrimCommandPool    = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
      if ( !vkTrimCommandPool )
        vkTrimCommandPool = vkTrimCommandPoolKHR;
      vkUninitializePerformanceApiINTEL =
        PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
      vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
      vkUpdateDescriptorSetWithTemplate =
        PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR(
        vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
      if ( !vkUpdateDescriptorSetWithTemplate )
        vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
#if defined( VK_ENABLE_BETA_EXTENSIONS )
      vkUpdateVideoSessionParametersKHR =
        PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      vkWaitForFences     = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
      vkWaitSemaphores    = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
      if ( !vkWaitSemaphores )
        vkWaitSemaphores = vkWaitSemaphoresKHR;
      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR(
        vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
    }
  };
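  // Illustrative usage only (not part of the generated header, names are placeholders):
  // once a device has been created, this loader can be (re)initialized with it so that the
  // device-level entry points above are resolved directly through vkGetDeviceProcAddr, e.g.
  //   VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic dispatcher( instance, vkGetInstanceProcAddr );
  //   dispatcher.init( device );               // loads the PFN_* members seen above
  //   device.waitIdle( dispatcher );           // pass the dispatcher to member calls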

}  // namespace VULKAN_HPP_NAMESPACE

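// The std::hash specializations below make the strongly typed Vulkan handle wrappers usable
// as keys in unordered containers; each one simply hashes the underlying C handle
// (VkBuffer, VkImage, ...). Illustrative usage only, assuming a valid vk::Buffer 'buffer':
//   std::unordered_map<VULKAN_HPP_NAMESPACE::Buffer, VkDeviceSize> bufferSizes;
//   bufferSizes[buffer] = VkDeviceSize( 4096 );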
namespace std
{
  template <>
  struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & accelerationStructureKHR ) const
      VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkAccelerationStructureKHR>{}(
        static_cast<VkAccelerationStructureKHR>( accelerationStructureKHR ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & accelerationStructureNV ) const
      VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkAccelerationStructureNV>{}(
        static_cast<VkAccelerationStructureNV>( accelerationStructureNV ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Buffer>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Buffer const & buffer ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkBuffer>{}( static_cast<VkBuffer>( buffer ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::BufferView>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferView const & bufferView ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkBufferView>{}( static_cast<VkBufferView>( bufferView ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::CommandBuffer>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBuffer const & commandBuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkCommandBuffer>{}( static_cast<VkCommandBuffer>( commandBuffer ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::CommandPool>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandPool const & commandPool ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkCommandPool>{}( static_cast<VkCommandPool>( commandPool ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::CuFunctionNVX>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::CuFunctionNVX const & cuFunctionNVX ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkCuFunctionNVX>{}( static_cast<VkCuFunctionNVX>( cuFunctionNVX ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::CuModuleNVX>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleNVX const & cuModuleNVX ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkCuModuleNVX>{}( static_cast<VkCuModuleNVX>( cuModuleNVX ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & debugReportCallbackEXT ) const
      VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDebugReportCallbackEXT>{}( static_cast<VkDebugReportCallbackEXT>( debugReportCallbackEXT ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & debugUtilsMessengerEXT ) const
      VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDebugUtilsMessengerEXT>{}( static_cast<VkDebugUtilsMessengerEXT>( debugUtilsMessengerEXT ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
  {
    std::size_t
      operator()( VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & deferredOperationKHR ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDeferredOperationKHR>{}( static_cast<VkDeferredOperationKHR>( deferredOperationKHR ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DescriptorPool>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorPool const & descriptorPool ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDescriptorPool>{}( static_cast<VkDescriptorPool>( descriptorPool ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DescriptorSet>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorSet const & descriptorSet ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDescriptorSet>{}( static_cast<VkDescriptorSet>( descriptorSet ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
  {
    std::size_t
      operator()( VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & descriptorSetLayout ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDescriptorSetLayout>{}( static_cast<VkDescriptorSetLayout>( descriptorSetLayout ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & descriptorUpdateTemplate ) const
      VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDescriptorUpdateTemplate>{}(
        static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Device>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Device const & device ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDevice>{}( static_cast<VkDevice>( device ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DeviceMemory>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceMemory const & deviceMemory ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDeviceMemory>{}( static_cast<VkDeviceMemory>( deviceMemory ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DisplayKHR>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayKHR const & displayKHR ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDisplayKHR>{}( static_cast<VkDisplayKHR>( displayKHR ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeKHR const & displayModeKHR ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDisplayModeKHR>{}( static_cast<VkDisplayModeKHR>( displayModeKHR ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Event>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Event const & event ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkEvent>{}( static_cast<VkEvent>( event ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Fence>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Fence const & fence ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkFence>{}( static_cast<VkFence>( fence ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Framebuffer>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Framebuffer const & framebuffer ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkFramebuffer>{}( static_cast<VkFramebuffer>( framebuffer ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Image>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Image const & image ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkImage>{}( static_cast<VkImage>( image ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::ImageView>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageView const & imageView ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkImageView>{}( static_cast<VkImageView>( imageView ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & indirectCommandsLayoutNV ) const
      VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkIndirectCommandsLayoutNV>{}(
        static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayoutNV ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Instance>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Instance const & instance ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkInstance>{}( static_cast<VkInstance>( instance ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & performanceConfigurationINTEL )
      const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPerformanceConfigurationINTEL>{}(
        static_cast<VkPerformanceConfigurationINTEL>( performanceConfigurationINTEL ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice const & physicalDevice ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPhysicalDevice>{}( static_cast<VkPhysicalDevice>( physicalDevice ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Pipeline>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Pipeline const & pipeline ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPipeline>{}( static_cast<VkPipeline>( pipeline ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::PipelineCache>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCache const & pipelineCache ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPipelineCache>{}( static_cast<VkPipelineCache>( pipelineCache ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::PipelineLayout>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLayout const & pipelineLayout ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPipelineLayout>{}( static_cast<VkPipelineLayout>( pipelineLayout ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
  {
    std::size_t
      operator()( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT const & privateDataSlotEXT ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPrivateDataSlotEXT>{}( static_cast<VkPrivateDataSlotEXT>( privateDataSlotEXT ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::QueryPool>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPool const & queryPool ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkQueryPool>{}( static_cast<VkQueryPool>( queryPool ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Queue>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Queue const & queue ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkQueue>{}( static_cast<VkQueue>( queue ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::RenderPass>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPass const & renderPass ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkRenderPass>{}( static_cast<VkRenderPass>( renderPass ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Sampler>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Sampler const & sampler ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSampler>{}( static_cast<VkSampler>( sampler ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & samplerYcbcrConversion ) const
      VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSamplerYcbcrConversion>{}( static_cast<VkSamplerYcbcrConversion>( samplerYcbcrConversion ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Semaphore>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Semaphore const & semaphore ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSemaphore>{}( static_cast<VkSemaphore>( semaphore ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::ShaderModule>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModule const & shaderModule ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkShaderModule>{}( static_cast<VkShaderModule>( shaderModule ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::SurfaceKHR>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceKHR const & surfaceKHR ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSurfaceKHR>{}( static_cast<VkSurfaceKHR>( surfaceKHR ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::SwapchainKHR>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainKHR const & swapchainKHR ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSwapchainKHR>{}( static_cast<VkSwapchainKHR>( swapchainKHR ) );
    }
  };

  template <>
  struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
  {
    std::size_t
      operator()( VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & validationCacheEXT ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkValidationCacheEXT>{}( static_cast<VkValidationCacheEXT>( validationCacheEXT ) );
    }
  };

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct hash<VULKAN_HPP_NAMESPACE::VideoSessionKHR>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionKHR const & videoSessionKHR ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkVideoSessionKHR>{}( static_cast<VkVideoSessionKHR>( videoSessionKHR ) );
    }
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/

#if defined( VK_ENABLE_BETA_EXTENSIONS )
  template <>
  struct hash<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & videoSessionParametersKHR ) const
      VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkVideoSessionParametersKHR>{}(
        static_cast<VkVideoSessionParametersKHR>( videoSessionParametersKHR ) );
    }
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
}  // namespace std
#endif